From 5f77bbb702ac19a47a2897a1d4ac8ca14928205b Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Thu, 6 Oct 2022 00:02:17 +0000 Subject: [PATCH] CodeGen from PR 21014 in Azure/azure-rest-api-specs Merge a4e816dbad2f543f6cdcf7ba2f6ee50b5e22c676 into 5f21d9c7e9c9f61a3de363bf3b1d0ce52a00232a --- .../_meta.json | 11 + .../mgmt/machinelearningservices/__init__.py | 15 +- .../_azure_machine_learning_services.py | 306 + .../_azure_machine_learning_workspaces.py | 121 - .../machinelearningservices/_configuration.py | 60 +- .../mgmt/machinelearningservices/_patch.py | 20 + .../machinelearningservices/_serialization.py | 1970 ++ .../mgmt/machinelearningservices/_vendor.py | 27 + .../mgmt/machinelearningservices/_version.py | 2 +- .../machinelearningservices/aio/__init__.py | 15 +- .../aio/_azure_machine_learning_services.py | 307 + .../aio/_azure_machine_learning_workspaces.py | 115 - .../aio/_configuration.py | 57 +- .../machinelearningservices/aio/_patch.py | 20 + .../aio/operations/__init__.py | 90 +- ..._machine_learning_workspaces_operations.py | 89 - .../_batch_deployments_operations.py | 888 + .../operations/_batch_endpoints_operations.py | 916 + .../operations/_code_containers_operations.py | 437 + .../operations/_code_versions_operations.py | 468 + .../_component_containers_operations.py | 446 + .../_component_versions_operations.py | 473 + .../aio/operations/_compute_operations.py | 1625 + .../operations/_data_containers_operations.py | 446 + .../operations/_data_versions_operations.py | 482 + .../aio/operations/_datastores_operations.py | 541 + .../_environment_containers_operations.py | 447 + .../_environment_versions_operations.py | 473 + .../aio/operations/_jobs_operations.py | 639 + .../operations/_labeling_jobs_operations.py | 984 + .../_machine_learning_compute_operations.py | 895 - .../_model_containers_operations.py | 450 + .../operations/_model_versions_operations.py | 499 + .../aio/operations/_notebooks_operations.py | 160 - .../_online_deployments_operations.py | 1158 + .../_online_endpoints_operations.py | 1243 + .../aio/operations/_operations.py | 127 +- .../aio/operations/_patch.py | 20 + ...private_endpoint_connections_operations.py | 532 +- .../_private_link_resources_operations.py | 132 +- .../aio/operations/_quotas_operations.py | 272 +- .../aio/operations/_registries_operations.py | 717 + .../_registry_code_containers_operations.py | 594 + .../_registry_code_versions_operations.py | 628 + ...egistry_component_containers_operations.py | 594 + ..._registry_component_versions_operations.py | 628 + ...istry_environment_containers_operations.py | 604 + ...egistry_environment_versions_operations.py | 633 + .../_registry_model_containers_operations.py | 603 + .../_registry_model_versions_operations.py | 651 + .../aio/operations/_schedules_operations.py | 582 + .../aio/operations/_usages_operations.py | 140 +- .../_virtual_machine_sizes_operations.py | 136 +- .../_workspace_connections_operations.py | 557 +- .../_workspace_features_operations.py | 143 +- .../aio/operations/_workspaces_operations.py | 1774 +- .../models/__init__.py | 1466 +- .../_azure_machine_learning_services_enums.py | 1538 + ...azure_machine_learning_workspaces_enums.py | 265 - .../machinelearningservices/models/_models.py | 3667 --- .../models/_models_py3.py | 25974 ++++++++++++++-- .../machinelearningservices/models/_patch.py | 20 + .../operations/__init__.py | 90 +- ..._machine_learning_workspaces_operations.py | 94 - .../_batch_deployments_operations.py | 1106 + .../operations/_batch_endpoints_operations.py | 
1127 + .../operations/_code_containers_operations.py | 571 + .../operations/_code_versions_operations.py | 620 + .../_component_containers_operations.py | 591 + .../_component_versions_operations.py | 629 + .../operations/_compute_operations.py | 2044 ++ .../operations/_data_containers_operations.py | 588 + .../operations/_data_versions_operations.py | 641 + .../operations/_datastores_operations.py | 741 + .../_environment_containers_operations.py | 592 + .../_environment_versions_operations.py | 629 + .../operations/_jobs_operations.py | 824 + .../operations/_labeling_jobs_operations.py | 1237 + .../_machine_learning_compute_operations.py | 912 - .../_model_containers_operations.py | 595 + .../operations/_model_versions_operations.py | 672 + .../operations/_notebooks_operations.py | 166 - .../_online_deployments_operations.py | 1463 + .../_online_endpoints_operations.py | 1542 + .../operations/_operations.py | 155 +- .../operations/_patch.py | 20 + ...private_endpoint_connections_operations.py | 707 +- .../_private_link_resources_operations.py | 173 +- .../operations/_quotas_operations.py | 340 +- .../operations/_registries_operations.py | 909 + .../_registry_code_containers_operations.py | 724 + .../_registry_code_versions_operations.py | 776 + ...egistry_component_containers_operations.py | 732 + ..._registry_component_versions_operations.py | 782 + ...istry_environment_containers_operations.py | 750 + ...egistry_environment_versions_operations.py | 805 + .../_registry_model_containers_operations.py | 746 + .../_registry_model_versions_operations.py | 814 + .../operations/_schedules_operations.py | 721 + .../operations/_usages_operations.py | 177 +- .../_virtual_machine_sizes_operations.py | 172 +- .../_workspace_connections_operations.py | 736 +- .../_workspace_features_operations.py | 187 +- .../operations/_workspaces_operations.py | 2240 +- 104 files changed, 78112 insertions(+), 11920 deletions(-) create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_services.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_patch.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_serialization.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_services.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_workspaces.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_patch.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_deployments_operations.py create mode 100644 
sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_endpoints_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_datastores_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_machine_learning_compute_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_notebooks_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_deployments_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_endpoints_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_patch.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registries_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_code_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_code_versions_operations.py create mode 100644 
sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_schedules_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_services_enums.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_patch.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py create mode 100644 
sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py delete mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_notebooks_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_patch.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registries_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json b/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json new file mode 100644 index 000000000000..6e1cf497ad4b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json @@ -0,0 +1,11 @@ +{ + "autorest": "3.9.2", + "use": [ + "@autorest/python@6.1.9", + "@autorest/modelerfour@4.24.3" + ], + "commit": "f80f0bd2d67738b45dfd812f380d512f3717b75f", + "repository_url": 
"https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/machinelearningservices/resource-manager/readme.md --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.1.9 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False", + "readme": "specification/machinelearningservices/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/__init__.py index 67feb9a70351..0f340c4a230e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/__init__.py @@ -6,14 +6,19 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces +from ._azure_machine_learning_services import AzureMachineLearningServices from ._version import VERSION __version__ = VERSION -__all__ = ['AzureMachineLearningWorkspaces'] try: - from ._patch import patch_sdk # type: ignore - patch_sdk() + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import except ImportError: - pass + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = ["AzureMachineLearningServices"] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_services.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_services.py new file mode 100644 index 000000000000..10d63d1671f6 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_services.py @@ -0,0 +1,306 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING + +from azure.core.rest import HttpRequest, HttpResponse +from azure.mgmt.core import ARMPipelineClient + +from . 
import models +from ._configuration import AzureMachineLearningServicesConfiguration +from ._serialization import Deserializer, Serializer +from .operations import ( + BatchDeploymentsOperations, + BatchEndpointsOperations, + CodeContainersOperations, + CodeVersionsOperations, + ComponentContainersOperations, + ComponentVersionsOperations, + ComputeOperations, + DataContainersOperations, + DataVersionsOperations, + DatastoresOperations, + EnvironmentContainersOperations, + EnvironmentVersionsOperations, + JobsOperations, + LabelingJobsOperations, + ModelContainersOperations, + ModelVersionsOperations, + OnlineDeploymentsOperations, + OnlineEndpointsOperations, + Operations, + PrivateEndpointConnectionsOperations, + PrivateLinkResourcesOperations, + QuotasOperations, + RegistriesOperations, + RegistryCodeContainersOperations, + RegistryCodeVersionsOperations, + RegistryComponentContainersOperations, + RegistryComponentVersionsOperations, + RegistryEnvironmentContainersOperations, + RegistryEnvironmentVersionsOperations, + RegistryModelContainersOperations, + RegistryModelVersionsOperations, + SchedulesOperations, + UsagesOperations, + VirtualMachineSizesOperations, + WorkspaceConnectionsOperations, + WorkspaceFeaturesOperations, + WorkspacesOperations, +) + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential + + +class AzureMachineLearningServices: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes + """These APIs allow end users to operate on Azure Machine Learning Workspace resources. + + :ivar operations: Operations operations + :vartype operations: azure.mgmt.machinelearningservices.operations.Operations + :ivar workspaces: WorkspacesOperations operations + :vartype workspaces: azure.mgmt.machinelearningservices.operations.WorkspacesOperations + :ivar usages: UsagesOperations operations + :vartype usages: azure.mgmt.machinelearningservices.operations.UsagesOperations + :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations + :vartype virtual_machine_sizes: + azure.mgmt.machinelearningservices.operations.VirtualMachineSizesOperations + :ivar quotas: QuotasOperations operations + :vartype quotas: azure.mgmt.machinelearningservices.operations.QuotasOperations + :ivar compute: ComputeOperations operations + :vartype compute: azure.mgmt.machinelearningservices.operations.ComputeOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: + azure.mgmt.machinelearningservices.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: + azure.mgmt.machinelearningservices.operations.PrivateLinkResourcesOperations + :ivar workspace_connections: WorkspaceConnectionsOperations operations + :vartype workspace_connections: + azure.mgmt.machinelearningservices.operations.WorkspaceConnectionsOperations + :ivar registry_code_containers: RegistryCodeContainersOperations operations + :vartype registry_code_containers: + azure.mgmt.machinelearningservices.operations.RegistryCodeContainersOperations + :ivar registry_code_versions: RegistryCodeVersionsOperations operations + :vartype registry_code_versions: + azure.mgmt.machinelearningservices.operations.RegistryCodeVersionsOperations + :ivar registry_component_containers: RegistryComponentContainersOperations operations + :vartype registry_component_containers: + 
azure.mgmt.machinelearningservices.operations.RegistryComponentContainersOperations + :ivar registry_component_versions: RegistryComponentVersionsOperations operations + :vartype registry_component_versions: + azure.mgmt.machinelearningservices.operations.RegistryComponentVersionsOperations + :ivar registry_environment_containers: RegistryEnvironmentContainersOperations operations + :vartype registry_environment_containers: + azure.mgmt.machinelearningservices.operations.RegistryEnvironmentContainersOperations + :ivar registry_environment_versions: RegistryEnvironmentVersionsOperations operations + :vartype registry_environment_versions: + azure.mgmt.machinelearningservices.operations.RegistryEnvironmentVersionsOperations + :ivar registry_model_containers: RegistryModelContainersOperations operations + :vartype registry_model_containers: + azure.mgmt.machinelearningservices.operations.RegistryModelContainersOperations + :ivar registry_model_versions: RegistryModelVersionsOperations operations + :vartype registry_model_versions: + azure.mgmt.machinelearningservices.operations.RegistryModelVersionsOperations + :ivar batch_endpoints: BatchEndpointsOperations operations + :vartype batch_endpoints: + azure.mgmt.machinelearningservices.operations.BatchEndpointsOperations + :ivar batch_deployments: BatchDeploymentsOperations operations + :vartype batch_deployments: + azure.mgmt.machinelearningservices.operations.BatchDeploymentsOperations + :ivar code_containers: CodeContainersOperations operations + :vartype code_containers: + azure.mgmt.machinelearningservices.operations.CodeContainersOperations + :ivar code_versions: CodeVersionsOperations operations + :vartype code_versions: azure.mgmt.machinelearningservices.operations.CodeVersionsOperations + :ivar component_containers: ComponentContainersOperations operations + :vartype component_containers: + azure.mgmt.machinelearningservices.operations.ComponentContainersOperations + :ivar component_versions: ComponentVersionsOperations operations + :vartype component_versions: + azure.mgmt.machinelearningservices.operations.ComponentVersionsOperations + :ivar data_containers: DataContainersOperations operations + :vartype data_containers: + azure.mgmt.machinelearningservices.operations.DataContainersOperations + :ivar data_versions: DataVersionsOperations operations + :vartype data_versions: azure.mgmt.machinelearningservices.operations.DataVersionsOperations + :ivar datastores: DatastoresOperations operations + :vartype datastores: azure.mgmt.machinelearningservices.operations.DatastoresOperations + :ivar environment_containers: EnvironmentContainersOperations operations + :vartype environment_containers: + azure.mgmt.machinelearningservices.operations.EnvironmentContainersOperations + :ivar environment_versions: EnvironmentVersionsOperations operations + :vartype environment_versions: + azure.mgmt.machinelearningservices.operations.EnvironmentVersionsOperations + :ivar jobs: JobsOperations operations + :vartype jobs: azure.mgmt.machinelearningservices.operations.JobsOperations + :ivar labeling_jobs: LabelingJobsOperations operations + :vartype labeling_jobs: azure.mgmt.machinelearningservices.operations.LabelingJobsOperations + :ivar model_containers: ModelContainersOperations operations + :vartype model_containers: + azure.mgmt.machinelearningservices.operations.ModelContainersOperations + :ivar model_versions: ModelVersionsOperations operations + :vartype model_versions: azure.mgmt.machinelearningservices.operations.ModelVersionsOperations + :ivar 
online_endpoints: OnlineEndpointsOperations operations + :vartype online_endpoints: + azure.mgmt.machinelearningservices.operations.OnlineEndpointsOperations + :ivar online_deployments: OnlineDeploymentsOperations operations + :vartype online_deployments: + azure.mgmt.machinelearningservices.operations.OnlineDeploymentsOperations + :ivar schedules: SchedulesOperations operations + :vartype schedules: azure.mgmt.machinelearningservices.operations.SchedulesOperations + :ivar workspace_features: WorkspaceFeaturesOperations operations + :vartype workspace_features: + azure.mgmt.machinelearningservices.operations.WorkspaceFeaturesOperations + :ivar registries: RegistriesOperations operations + :vartype registries: azure.mgmt.machinelearningservices.operations.RegistriesOperations + :param credential: Credential needed for the client to connect to Azure. Required. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. Required. + :type subscription_id: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-10-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = AzureMachineLearningServicesConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) + self.virtual_machine_sizes = VirtualMachineSizesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.quotas = QuotasOperations(self._client, self._config, self._serialize, self._deserialize) + self.compute = ComputeOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.workspace_connections = WorkspaceConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_code_containers = RegistryCodeContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_code_versions = RegistryCodeVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_component_containers = RegistryComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_component_versions = 
RegistryComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_environment_containers = RegistryEnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_environment_versions = RegistryEnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_model_containers = RegistryModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_model_versions = RegistryModelVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.batch_endpoints = BatchEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) + self.batch_deployments = BatchDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.code_containers = CodeContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.code_versions = CodeVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.component_containers = ComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.component_versions = ComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.data_containers = DataContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_versions = DataVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.datastores = DatastoresOperations(self._client, self._config, self._serialize, self._deserialize) + self.environment_containers = EnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.environment_versions = EnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.jobs = JobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.labeling_jobs = LabelingJobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.model_containers = ModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.model_versions = ModelVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.online_endpoints = OnlineEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.online_deployments = OnlineDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.schedules = SchedulesOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspace_features = WorkspaceFeaturesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registries = RegistriesOperations(self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. 
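As an aside, a minimal sketch (not part of the generated patch) of how the regenerated client and the `_send_request` helper described here might be exercised. `DefaultAzureCredential` comes from the separate `azure-identity` package, the subscription ID is a placeholder, and `workspaces.list_by_subscription()` is assumed to be exposed by the generated `WorkspacesOperations`:

from azure.core.rest import HttpRequest
from azure.identity import DefaultAzureCredential  # assumed extra dependency
from azure.mgmt.machinelearningservices import AzureMachineLearningServices

# Placeholder subscription ID purely for illustration.
client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# Typed operation groups wired up in __init__ above.
for workspace in client.workspaces.list_by_subscription():
    print(workspace.name)

# Raw ARM call through the same pipeline via the _send_request helper.
request = HttpRequest(
    "GET",
    "/providers/Microsoft.MachineLearningServices/operations?api-version=2022-10-01-preview",
)
response = client._send_request(request)
print(response.status_code)

Both paths go through the client's chained policies, so retry, logging, and authentication configured on the client apply equally to the typed operation groups and to raw requests sent this way.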
+ :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> AzureMachineLearningServices + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py deleted file mode 100644 index 527b9e2bb787..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py +++ /dev/null @@ -1,121 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import TYPE_CHECKING - -from azure.mgmt.core import ARMPipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Optional - - from azure.core.credentials import TokenCredential - -from ._configuration import AzureMachineLearningWorkspacesConfiguration -from .operations import Operations -from .operations import WorkspacesOperations -from .operations import WorkspaceFeaturesOperations -from .operations import NotebooksOperations -from .operations import UsagesOperations -from .operations import VirtualMachineSizesOperations -from .operations import QuotasOperations -from .operations import WorkspaceConnectionsOperations -from .operations import MachineLearningComputeOperations -from .operations import AzureMachineLearningWorkspacesOperationsMixin -from .operations import PrivateEndpointConnectionsOperations -from .operations import PrivateLinkResourcesOperations -from . import models - - -class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin): - """These APIs allow end users to operate on Azure Machine Learning Workspace resources. 
- - :ivar operations: Operations operations - :vartype operations: azure.mgmt.machinelearningservices.operations.Operations - :ivar workspaces: WorkspacesOperations operations - :vartype workspaces: azure.mgmt.machinelearningservices.operations.WorkspacesOperations - :ivar workspace_features: WorkspaceFeaturesOperations operations - :vartype workspace_features: azure.mgmt.machinelearningservices.operations.WorkspaceFeaturesOperations - :ivar notebooks: NotebooksOperations operations - :vartype notebooks: azure.mgmt.machinelearningservices.operations.NotebooksOperations - :ivar usages: UsagesOperations operations - :vartype usages: azure.mgmt.machinelearningservices.operations.UsagesOperations - :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations - :vartype virtual_machine_sizes: azure.mgmt.machinelearningservices.operations.VirtualMachineSizesOperations - :ivar quotas: QuotasOperations operations - :vartype quotas: azure.mgmt.machinelearningservices.operations.QuotasOperations - :ivar workspace_connections: WorkspaceConnectionsOperations operations - :vartype workspace_connections: azure.mgmt.machinelearningservices.operations.WorkspaceConnectionsOperations - :ivar machine_learning_compute: MachineLearningComputeOperations operations - :vartype machine_learning_compute: azure.mgmt.machinelearningservices.operations.MachineLearningComputeOperations - :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations - :vartype private_endpoint_connections: azure.mgmt.machinelearningservices.operations.PrivateEndpointConnectionsOperations - :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: azure.mgmt.machinelearningservices.operations.PrivateLinkResourcesOperations - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: Azure subscription identifier. - :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - base_url=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> None
-        if not base_url:
-            base_url = 'https://management.azure.com'
-        self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
-        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
-
-        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
-        self._serialize = Serializer(client_models)
-        self._serialize.client_side_validation = False
-        self._deserialize = Deserializer(client_models)
-
-        self.operations = Operations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.workspaces = WorkspacesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.workspace_features = WorkspaceFeaturesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.notebooks = NotebooksOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.usages = UsagesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.virtual_machine_sizes = VirtualMachineSizesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.quotas = QuotasOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.workspace_connections = WorkspaceConnectionsOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.machine_learning_compute = MachineLearningComputeOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.private_link_resources = PrivateLinkResourcesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-
-    def close(self):
-        # type: () -> None
-        self._client.close()
-
-    def __enter__(self):
-        # type: () -> AzureMachineLearningWorkspaces
-        self._client.__enter__()
-        return self
-
-    def __exit__(self, *exc_details):
-        # type: (Any) -> None
-        self._client.__exit__(*exc_details)
diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py
index 79ded5841781..84d74b0299ab 100644
--- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py
+++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py
@@ -6,66 +6,64 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
-from typing import TYPE_CHECKING
+from typing import Any, TYPE_CHECKING
 
 from azure.core.configuration import Configuration
 from azure.core.pipeline import policies
-from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
 
 from ._version import VERSION
 
 if TYPE_CHECKING:
     # pylint: disable=unused-import,ungrouped-imports
-    from typing import Any
-
     from azure.core.credentials import TokenCredential
 
 
-class AzureMachineLearningWorkspacesConfiguration(Configuration):
-    """Configuration for AzureMachineLearningWorkspaces.
+class AzureMachineLearningServicesConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
+    """Configuration for AzureMachineLearningServices.
 
     Note that all parameters used to create this instance are saved as instance attributes.
 
-    :param credential: Credential needed for the client to connect to Azure.
+    :param credential: Credential needed for the client to connect to Azure. Required.
     :type credential: ~azure.core.credentials.TokenCredential
-    :param subscription_id: Azure subscription identifier.
+    :param subscription_id: The ID of the target subscription. Required.
     :type subscription_id: str
+    :keyword api_version: Api Version. Default value is "2022-10-01-preview". Note that overriding
+     this default value may result in unsupported behavior.
+    :paramtype api_version: str
     """
 
-    def __init__(
-        self,
-        credential,  # type: "TokenCredential"
-        subscription_id,  # type: str
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+    def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
+        super(AzureMachineLearningServicesConfiguration, self).__init__(**kwargs)
+        api_version = kwargs.pop("api_version", "2022-10-01-preview")  # type: str
+
         if credential is None:
             raise ValueError("Parameter 'credential' must not be None.")
         if subscription_id is None:
             raise ValueError("Parameter 'subscription_id' must not be None.")
-        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
         self.credential = credential
         self.subscription_id = subscription_id
-        self.api_version = "2020-08-01"
-        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
-        kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION))
+        self.api_version = api_version
+        self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
+        kwargs.setdefault("sdk_moniker", "mgmt-machinelearningservices/{}".format(VERSION))
         self._configure(**kwargs)
 
     def _configure(
-        self,
-        **kwargs  # type: Any
+        self, **kwargs  # type: Any
     ):
         # type: (...) -> None
-        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
-        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
-        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
-        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
-        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
-        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
-        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
-        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
-        self.authentication_policy = kwargs.get('authentication_policy')
+        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get("authentication_policy")
         if self.credential and not self.authentication_policy:
-            self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
+            self.authentication_policy = ARMChallengeAuthenticationPolicy(
+                self.credential, *self.credential_scopes, **kwargs
+            )
diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_patch.py
new file mode 100644
index 000000000000..f7dd32510333
--- /dev/null
+++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = []  # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+    """Do not remove from this file.
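A hypothetical illustration (not part of the patch) of the customization flow that `_patch.py` and the reworked `__init__.py` enable: anything listed in `_patch.py`'s `__all__` is re-exported by the package, and `patch_sdk()` runs at import time. The subclass name and helper below are assumptions, not generated code.

# Hypothetical contents a maintainer might place in _patch.py (illustrative only).
from typing import List

from ._azure_machine_learning_services import AzureMachineLearningServices


class AzureMachineLearningServicesExt(AzureMachineLearningServices):
    """Assumed convenience subclass; not part of the generated code."""

    def workspace_names(self, resource_group_name: str):
        # Relies on the generated WorkspacesOperations.list_by_resource_group.
        return [w.name for w in self.workspaces.list_by_resource_group(resource_group_name)]


__all__: List[str] = ["AzureMachineLearningServicesExt"]  # re-exported by __init__.py


def patch_sdk():
    """Keep the hook; nothing to patch at import time in this sketch."""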
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_serialization.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_serialization.py
new file mode 100644
index 000000000000..7c1dedb5133d
--- /dev/null
+++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_serialization.py
@@ -0,0 +1,1970 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# pylint: skip-file
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+import codecs
+
+try:
+    from urllib import quote  # type: ignore
+except ImportError:
+    from urllib.parse import quote  # type: ignore
+import xml.etree.ElementTree as ET
+
+import isodate
+
+from typing import Dict, Any, cast, TYPE_CHECKING
+
+from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback
+
+_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+
+if TYPE_CHECKING:
+    from typing import Optional, Union, AnyStr, IO, Mapping
+
+
+class RawDeserializer:
+
+    # Accept "text" because we're open minded people...
+    JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
+
+    # Name used in context
+    CONTEXT_NAME = "deserialized_data"
+
+    @classmethod
+    def deserialize_from_text(cls, data, content_type=None):
+        # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any
+        """Decode data according to content-type.
+
+        Accepts a stream of data as well, but it will be loaded at once in memory for now.
+
+        If no content-type, will return the string version (not bytes, not stream)
+
+        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
+        :type data: str or bytes or IO
+        :param str content_type: The content type.
+ """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes, headers): + # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +try: + basestring # type: ignore + unicode_str = unicode # type: ignore +except NameError: + basestring = str # type: ignore + unicode_str = str # type: ignore + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0.""" + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation.""" + return "Z" + + def dst(self, dt): + """No daylight saving for UTC.""" + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. 
+ Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset): + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc # type: ignore +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(? y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes=None): + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize(self, target_obj, data_type=None, **kwargs): + """Serialize data into a string according to type. + + :param target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() + try: + attributes = target_obj._attribute_map + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) + continue + if xml_desc.get("text", False): + serialized.text = new_attr + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = unicode_str(new_attr) + serialized.append(local_node) + else: # JSON + for k in reversed(keys): + unflattened = {k: new_attr} + new_attr = unflattened + + _new_attr = new_attr + _serialized = serialized + for k in keys: + if k not in _serialized: + _serialized.update(_new_attr) + _new_attr = _new_attr[k] + _serialized = _serialized[k] + except ValueError: + continue + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise_with_traceback(SerializationError, msg, err) + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + + # Just in case this is a dict + internal_data_type = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. 
+ deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] + if not kwargs.get("skip_quote", False): + data = [quote(str(d), safe="") for d in data] + return str(self.serialize_iter(data, internal_data_type, **kwargs)) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. 
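As an illustrative sketch of how the request-parameter helpers above (url, query, header) behave, assuming the _serialization module is importable from this package as added by the patch; the parameter names and values are invented samples:

    >>> from azure.mgmt.machinelearningservices._serialization import Serializer
    >>> serializer = Serializer()
    >>> serializer.url("workspace_name", "my workspace", "str")  # path segments are percent-encoded
    'my%20workspace'
    >>> serializer.query("top", 10, "int")
    '10'
    >>> serializer.header("count_type", "total", "str")
    'total'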
+ """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise_with_traceback(SerializationError, msg.format(data, data_type), err) + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. 
+ :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError: + serialized.append(None) + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
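For illustration, a sketch of how a plain dictionary of built-in types is handled; the payload is an invented sample, dates become ISO-8601 strings, and basic types pass through unchanged:

    >>> import datetime
    >>> from azure.mgmt.machinelearningservices._serialization import Serializer
    >>> serializer = Serializer()
    >>> serializer.serialize_object(
    ...     {"name": "run-1", "created": datetime.date(2022, 10, 6), "tags": ["a", 1]}
    ... )
    {'name': 'run-1', 'created': '2022-10-06', 'tags': ['a', 1]}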
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is unicode_str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) + return result + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
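A short sketch of the static date, duration and base64 helpers; the inputs are invented samples, and the duration formatting relies on the isodate dependency this module already uses:

    >>> import datetime
    >>> from azure.mgmt.machinelearningservices._serialization import Serializer
    >>> Serializer.serialize_date(datetime.date(2022, 10, 6))
    '2022-10-06'
    >>> Serializer.serialize_duration(datetime.timedelta(days=1))
    'P1D'
    >>> Serializer.serialize_base64(b"hello world")  # URL-safe alphabet, padding stripped
    'aGVsbG8gd29ybGQ'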
+ :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError: + raise TypeError("RFC1123 object must be valid Datetime object.") + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(SerializationError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) + + @staticmethod + def serialize_unix(attr, **kwargs): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError: + raise TypeError("Unix time object must be valid Datetime object.") + + +def rest_key_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key.""" + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if 
internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + else: + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + else: # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer(object): + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes=None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): + """Call the deserializer on a model. 
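As a rough sketch of the type-name driven dispatch set up in the Deserializer constructor above (values are invented; no model classes are needed for basic and datetime types):

    >>> from azure.mgmt.machinelearningservices._serialization import Deserializer
    >>> deserializer = Deserializer()
    >>> deserializer.deserialize_data(["1", "2", "3"], "[int]")
    [1, 2, 3]
    >>> parsed = deserializer.deserialize_data("2022-10-06T00:02:17Z", "iso-8601")
    >>> (parsed.year, parsed.month, parsed.day)
    (2022, 10, 6)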
+ + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, basestring): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None: + return data + try: + attributes = response._attribute_map + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name + raise_with_traceback(DeserializationError, msg, err) + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. 
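For illustration, a sketch of how a flattened REST key is followed through nested JSON by rest_key_extractor; this assumes _FLATTEN and _decode_attribute_map_key (defined earlier in the module but not shown in full in this excerpt) behave as in the upstream msrest pattern, and the attribute map and payload are invented:

    >>> from azure.mgmt.machinelearningservices._serialization import rest_key_extractor
    >>> attr_desc = {"key": "properties.vmSize", "type": "str"}
    >>> rest_key_extractor("vm_size", attr_desc, {"properties": {"vmSize": "Standard_DS3_v2"}})
    'Standard_DS3_v2'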
+ + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + """ + if target is None: + return None, None + + if isinstance(target, basestring): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + :param str content_type: Swagger "produces" if available. + """ + try: + return self(target_obj, data, content_type=content_type) + except: + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param response: The response model class. + :param d_attrs: The deserialized response attributes. 
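A minimal sketch of the content-type driven unpacking used above; the body and headers are invented samples:

    >>> from azure.mgmt.machinelearningservices._serialization import RawDeserializer
    >>> RawDeserializer.deserialize_from_http_generics(
    ...     b'{"name": "my-workspace"}',
    ...     {"content-type": "application/json; charset=utf-8"},
    ... )
    {'name': 'my-workspace'}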
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [k for k, v in response._validation.items() if v.get("readonly")] + const = [k for k, v in response._validation.items() if v.get("constant")] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise_with_traceback(DeserializationError, msg, err) + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, basestring): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + else: + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + else: + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + elif isinstance(attr, basestring): + if attr.lower() in ["true", "1"]: + return True + elif attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): + return data + except NameError: + return str(data) + else: + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + # https://github.com/Azure/azure-rest-api-specs/issues/141 + try: + return list(enum_obj.__members__.values())[data] + except IndexError: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) + attr = attr + padding + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :rtype: Decimal + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(attr) + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise_with_traceback(DeserializationError, msg, err) + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise_with_traceback(DeserializationError, msg, err) + else: + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. 
Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) + try: + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py new file mode 100644 index 000000000000..9aad73fc743e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py @@ -0,0 +1,27 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] + template = "/".join(components) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py index c47f66669f1b..e5754a47ce68 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0" +VERSION = "1.0.0b1" diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/__init__.py index 872474577c4f..2df25e7e5804 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/__init__.py @@ -6,5 +6,16 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces -__all__ = ['AzureMachineLearningWorkspaces'] +from ._azure_machine_learning_services import AzureMachineLearningServices + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = ["AzureMachineLearningServices"] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_services.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_services.py new file mode 100644 index 000000000000..ff3603d6693a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_services.py @@ -0,0 +1,307 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING + +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient + +from .. import models +from .._serialization import Deserializer, Serializer +from ._configuration import AzureMachineLearningServicesConfiguration +from .operations import ( + BatchDeploymentsOperations, + BatchEndpointsOperations, + CodeContainersOperations, + CodeVersionsOperations, + ComponentContainersOperations, + ComponentVersionsOperations, + ComputeOperations, + DataContainersOperations, + DataVersionsOperations, + DatastoresOperations, + EnvironmentContainersOperations, + EnvironmentVersionsOperations, + JobsOperations, + LabelingJobsOperations, + ModelContainersOperations, + ModelVersionsOperations, + OnlineDeploymentsOperations, + OnlineEndpointsOperations, + Operations, + PrivateEndpointConnectionsOperations, + PrivateLinkResourcesOperations, + QuotasOperations, + RegistriesOperations, + RegistryCodeContainersOperations, + RegistryCodeVersionsOperations, + RegistryComponentContainersOperations, + RegistryComponentVersionsOperations, + RegistryEnvironmentContainersOperations, + RegistryEnvironmentVersionsOperations, + RegistryModelContainersOperations, + RegistryModelVersionsOperations, + SchedulesOperations, + UsagesOperations, + VirtualMachineSizesOperations, + WorkspaceConnectionsOperations, + WorkspaceFeaturesOperations, + WorkspacesOperations, +) + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class AzureMachineLearningServices: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes + """These APIs allow end users to operate on Azure Machine Learning Workspace resources. 
+ + :ivar operations: Operations operations + :vartype operations: azure.mgmt.machinelearningservices.aio.operations.Operations + :ivar workspaces: WorkspacesOperations operations + :vartype workspaces: azure.mgmt.machinelearningservices.aio.operations.WorkspacesOperations + :ivar usages: UsagesOperations operations + :vartype usages: azure.mgmt.machinelearningservices.aio.operations.UsagesOperations + :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations + :vartype virtual_machine_sizes: + azure.mgmt.machinelearningservices.aio.operations.VirtualMachineSizesOperations + :ivar quotas: QuotasOperations operations + :vartype quotas: azure.mgmt.machinelearningservices.aio.operations.QuotasOperations + :ivar compute: ComputeOperations operations + :vartype compute: azure.mgmt.machinelearningservices.aio.operations.ComputeOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: + azure.mgmt.machinelearningservices.aio.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: + azure.mgmt.machinelearningservices.aio.operations.PrivateLinkResourcesOperations + :ivar workspace_connections: WorkspaceConnectionsOperations operations + :vartype workspace_connections: + azure.mgmt.machinelearningservices.aio.operations.WorkspaceConnectionsOperations + :ivar registry_code_containers: RegistryCodeContainersOperations operations + :vartype registry_code_containers: + azure.mgmt.machinelearningservices.aio.operations.RegistryCodeContainersOperations + :ivar registry_code_versions: RegistryCodeVersionsOperations operations + :vartype registry_code_versions: + azure.mgmt.machinelearningservices.aio.operations.RegistryCodeVersionsOperations + :ivar registry_component_containers: RegistryComponentContainersOperations operations + :vartype registry_component_containers: + azure.mgmt.machinelearningservices.aio.operations.RegistryComponentContainersOperations + :ivar registry_component_versions: RegistryComponentVersionsOperations operations + :vartype registry_component_versions: + azure.mgmt.machinelearningservices.aio.operations.RegistryComponentVersionsOperations + :ivar registry_environment_containers: RegistryEnvironmentContainersOperations operations + :vartype registry_environment_containers: + azure.mgmt.machinelearningservices.aio.operations.RegistryEnvironmentContainersOperations + :ivar registry_environment_versions: RegistryEnvironmentVersionsOperations operations + :vartype registry_environment_versions: + azure.mgmt.machinelearningservices.aio.operations.RegistryEnvironmentVersionsOperations + :ivar registry_model_containers: RegistryModelContainersOperations operations + :vartype registry_model_containers: + azure.mgmt.machinelearningservices.aio.operations.RegistryModelContainersOperations + :ivar registry_model_versions: RegistryModelVersionsOperations operations + :vartype registry_model_versions: + azure.mgmt.machinelearningservices.aio.operations.RegistryModelVersionsOperations + :ivar batch_endpoints: BatchEndpointsOperations operations + :vartype batch_endpoints: + azure.mgmt.machinelearningservices.aio.operations.BatchEndpointsOperations + :ivar batch_deployments: BatchDeploymentsOperations operations + :vartype batch_deployments: + azure.mgmt.machinelearningservices.aio.operations.BatchDeploymentsOperations + :ivar code_containers: CodeContainersOperations operations + :vartype code_containers: 
+ azure.mgmt.machinelearningservices.aio.operations.CodeContainersOperations + :ivar code_versions: CodeVersionsOperations operations + :vartype code_versions: + azure.mgmt.machinelearningservices.aio.operations.CodeVersionsOperations + :ivar component_containers: ComponentContainersOperations operations + :vartype component_containers: + azure.mgmt.machinelearningservices.aio.operations.ComponentContainersOperations + :ivar component_versions: ComponentVersionsOperations operations + :vartype component_versions: + azure.mgmt.machinelearningservices.aio.operations.ComponentVersionsOperations + :ivar data_containers: DataContainersOperations operations + :vartype data_containers: + azure.mgmt.machinelearningservices.aio.operations.DataContainersOperations + :ivar data_versions: DataVersionsOperations operations + :vartype data_versions: + azure.mgmt.machinelearningservices.aio.operations.DataVersionsOperations + :ivar datastores: DatastoresOperations operations + :vartype datastores: azure.mgmt.machinelearningservices.aio.operations.DatastoresOperations + :ivar environment_containers: EnvironmentContainersOperations operations + :vartype environment_containers: + azure.mgmt.machinelearningservices.aio.operations.EnvironmentContainersOperations + :ivar environment_versions: EnvironmentVersionsOperations operations + :vartype environment_versions: + azure.mgmt.machinelearningservices.aio.operations.EnvironmentVersionsOperations + :ivar jobs: JobsOperations operations + :vartype jobs: azure.mgmt.machinelearningservices.aio.operations.JobsOperations + :ivar labeling_jobs: LabelingJobsOperations operations + :vartype labeling_jobs: + azure.mgmt.machinelearningservices.aio.operations.LabelingJobsOperations + :ivar model_containers: ModelContainersOperations operations + :vartype model_containers: + azure.mgmt.machinelearningservices.aio.operations.ModelContainersOperations + :ivar model_versions: ModelVersionsOperations operations + :vartype model_versions: + azure.mgmt.machinelearningservices.aio.operations.ModelVersionsOperations + :ivar online_endpoints: OnlineEndpointsOperations operations + :vartype online_endpoints: + azure.mgmt.machinelearningservices.aio.operations.OnlineEndpointsOperations + :ivar online_deployments: OnlineDeploymentsOperations operations + :vartype online_deployments: + azure.mgmt.machinelearningservices.aio.operations.OnlineDeploymentsOperations + :ivar schedules: SchedulesOperations operations + :vartype schedules: azure.mgmt.machinelearningservices.aio.operations.SchedulesOperations + :ivar workspace_features: WorkspaceFeaturesOperations operations + :vartype workspace_features: + azure.mgmt.machinelearningservices.aio.operations.WorkspaceFeaturesOperations + :ivar registries: RegistriesOperations operations + :vartype registries: azure.mgmt.machinelearningservices.aio.operations.RegistriesOperations + :param credential: Credential needed for the client to connect to Azure. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. Required. + :type subscription_id: str + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-10-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
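A minimal construction sketch for the async client; the credential shown is only one possible AsyncTokenCredential implementation and the subscription ID is a placeholder:

    >>> from azure.identity.aio import DefaultAzureCredential
    >>> from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices
    >>> client = AzureMachineLearningServices(
    ...     credential=DefaultAzureCredential(),
    ...     subscription_id="00000000-0000-0000-0000-000000000000",
    ... )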
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = AzureMachineLearningServicesConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) + self.virtual_machine_sizes = VirtualMachineSizesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.quotas = QuotasOperations(self._client, self._config, self._serialize, self._deserialize) + self.compute = ComputeOperations(self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.workspace_connections = WorkspaceConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_code_containers = RegistryCodeContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_code_versions = RegistryCodeVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_component_containers = RegistryComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_component_versions = RegistryComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_environment_containers = RegistryEnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_environment_versions = RegistryEnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_model_containers = RegistryModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_model_versions = RegistryModelVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.batch_endpoints = BatchEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) + self.batch_deployments = BatchDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.code_containers = CodeContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.code_versions = CodeVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.component_containers = ComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.component_versions = ComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.data_containers = 
DataContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.data_versions = DataVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.datastores = DatastoresOperations(self._client, self._config, self._serialize, self._deserialize) + self.environment_containers = EnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.environment_versions = EnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.jobs = JobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.labeling_jobs = LabelingJobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.model_containers = ModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.model_versions = ModelVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.online_endpoints = OnlineEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.online_deployments = OnlineDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.schedules = SchedulesOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspace_features = WorkspaceFeaturesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registries = RegistriesOperations(self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "AzureMachineLearningServices": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_workspaces.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_workspaces.py deleted file mode 100644 index 77636808b2d6..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_workspaces.py +++ /dev/null @@ -1,115 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, Optional, TYPE_CHECKING - -from azure.mgmt.core import AsyncARMPipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -from ._configuration import AzureMachineLearningWorkspacesConfiguration -from .operations import Operations -from .operations import WorkspacesOperations -from .operations import WorkspaceFeaturesOperations -from .operations import NotebooksOperations -from .operations import UsagesOperations -from .operations import VirtualMachineSizesOperations -from .operations import QuotasOperations -from .operations import WorkspaceConnectionsOperations -from .operations import MachineLearningComputeOperations -from .operations import AzureMachineLearningWorkspacesOperationsMixin -from .operations import PrivateEndpointConnectionsOperations -from .operations import PrivateLinkResourcesOperations -from .. import models - - -class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin): - """These APIs allow end users to operate on Azure Machine Learning Workspace resources. - - :ivar operations: Operations operations - :vartype operations: azure.mgmt.machinelearningservices.aio.operations.Operations - :ivar workspaces: WorkspacesOperations operations - :vartype workspaces: azure.mgmt.machinelearningservices.aio.operations.WorkspacesOperations - :ivar workspace_features: WorkspaceFeaturesOperations operations - :vartype workspace_features: azure.mgmt.machinelearningservices.aio.operations.WorkspaceFeaturesOperations - :ivar notebooks: NotebooksOperations operations - :vartype notebooks: azure.mgmt.machinelearningservices.aio.operations.NotebooksOperations - :ivar usages: UsagesOperations operations - :vartype usages: azure.mgmt.machinelearningservices.aio.operations.UsagesOperations - :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations - :vartype virtual_machine_sizes: azure.mgmt.machinelearningservices.aio.operations.VirtualMachineSizesOperations - :ivar quotas: QuotasOperations operations - :vartype quotas: azure.mgmt.machinelearningservices.aio.operations.QuotasOperations - :ivar workspace_connections: WorkspaceConnectionsOperations operations - :vartype workspace_connections: azure.mgmt.machinelearningservices.aio.operations.WorkspaceConnectionsOperations - :ivar machine_learning_compute: MachineLearningComputeOperations operations - :vartype machine_learning_compute: azure.mgmt.machinelearningservices.aio.operations.MachineLearningComputeOperations - :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations - :vartype private_endpoint_connections: azure.mgmt.machinelearningservices.aio.operations.PrivateEndpointConnectionsOperations - :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: azure.mgmt.machinelearningservices.aio.operations.PrivateLinkResourcesOperations - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: Azure subscription identifier. 
- :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - base_url: Optional[str] = None, - **kwargs: Any - ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False - self._deserialize = Deserializer(client_models) - - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) - self.workspaces = WorkspacesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.workspace_features = WorkspaceFeaturesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.notebooks = NotebooksOperations( - self._client, self._config, self._serialize, self._deserialize) - self.usages = UsagesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.virtual_machine_sizes = VirtualMachineSizesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.quotas = QuotasOperations( - self._client, self._config, self._serialize, self._deserialize) - self.workspace_connections = WorkspaceConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.machine_learning_compute = MachineLearningComputeOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_endpoint_connections = PrivateEndpointConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize) - - async def close(self) -> None: - await self._client.close() - - async def __aenter__(self) -> "AzureMachineLearningWorkspaces": - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details) -> None: - await self._client.__aexit__(*exc_details) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py index a11cb071c326..23e90222e814 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py @@ -10,7 +10,7 @@ from azure.core.configuration import Configuration from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy from .._version import VERSION @@ -19,49 +19,48 @@ from azure.core.credentials_async import AsyncTokenCredential -class AzureMachineLearningWorkspacesConfiguration(Configuration): - """Configuration for AzureMachineLearningWorkspaces. 
+class AzureMachineLearningServicesConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for AzureMachineLearningServices. Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: Azure subscription identifier. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-10-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str """ - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: + def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: + super(AzureMachineLearningServicesConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop("api_version", "2022-10-01-preview") # type: str + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id - self.api_version = "2020-08-01" - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION)) + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-machinelearningservices/{}".format(VERSION)) self._configure(**kwargs) - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or 
policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py index 516999b100d8..261f5da876a3 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py @@ -8,28 +8,84 @@ from ._operations import Operations from ._workspaces_operations import WorkspacesOperations -from ._workspace_features_operations import WorkspaceFeaturesOperations -from ._notebooks_operations import NotebooksOperations from ._usages_operations import UsagesOperations from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations from ._quotas_operations import QuotasOperations -from ._workspace_connections_operations import WorkspaceConnectionsOperations -from ._machine_learning_compute_operations import MachineLearningComputeOperations -from ._azure_machine_learning_workspaces_operations import AzureMachineLearningWorkspacesOperationsMixin +from ._compute_operations import ComputeOperations from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._workspace_connections_operations import WorkspaceConnectionsOperations +from ._registry_code_containers_operations import RegistryCodeContainersOperations +from ._registry_code_versions_operations import RegistryCodeVersionsOperations +from ._registry_component_containers_operations import RegistryComponentContainersOperations +from ._registry_component_versions_operations import RegistryComponentVersionsOperations +from ._registry_environment_containers_operations import 
RegistryEnvironmentContainersOperations +from ._registry_environment_versions_operations import RegistryEnvironmentVersionsOperations +from ._registry_model_containers_operations import RegistryModelContainersOperations +from ._registry_model_versions_operations import RegistryModelVersionsOperations +from ._batch_endpoints_operations import BatchEndpointsOperations +from ._batch_deployments_operations import BatchDeploymentsOperations +from ._code_containers_operations import CodeContainersOperations +from ._code_versions_operations import CodeVersionsOperations +from ._component_containers_operations import ComponentContainersOperations +from ._component_versions_operations import ComponentVersionsOperations +from ._data_containers_operations import DataContainersOperations +from ._data_versions_operations import DataVersionsOperations +from ._datastores_operations import DatastoresOperations +from ._environment_containers_operations import EnvironmentContainersOperations +from ._environment_versions_operations import EnvironmentVersionsOperations +from ._jobs_operations import JobsOperations +from ._labeling_jobs_operations import LabelingJobsOperations +from ._model_containers_operations import ModelContainersOperations +from ._model_versions_operations import ModelVersionsOperations +from ._online_endpoints_operations import OnlineEndpointsOperations +from ._online_deployments_operations import OnlineDeploymentsOperations +from ._schedules_operations import SchedulesOperations +from ._workspace_features_operations import WorkspaceFeaturesOperations +from ._registries_operations import RegistriesOperations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'Operations', - 'WorkspacesOperations', - 'WorkspaceFeaturesOperations', - 'NotebooksOperations', - 'UsagesOperations', - 'VirtualMachineSizesOperations', - 'QuotasOperations', - 'WorkspaceConnectionsOperations', - 'MachineLearningComputeOperations', - 'AzureMachineLearningWorkspacesOperationsMixin', - 'PrivateEndpointConnectionsOperations', - 'PrivateLinkResourcesOperations', + "Operations", + "WorkspacesOperations", + "UsagesOperations", + "VirtualMachineSizesOperations", + "QuotasOperations", + "ComputeOperations", + "PrivateEndpointConnectionsOperations", + "PrivateLinkResourcesOperations", + "WorkspaceConnectionsOperations", + "RegistryCodeContainersOperations", + "RegistryCodeVersionsOperations", + "RegistryComponentContainersOperations", + "RegistryComponentVersionsOperations", + "RegistryEnvironmentContainersOperations", + "RegistryEnvironmentVersionsOperations", + "RegistryModelContainersOperations", + "RegistryModelVersionsOperations", + "BatchEndpointsOperations", + "BatchDeploymentsOperations", + "CodeContainersOperations", + "CodeVersionsOperations", + "ComponentContainersOperations", + "ComponentVersionsOperations", + "DataContainersOperations", + "DataVersionsOperations", + "DatastoresOperations", + "EnvironmentContainersOperations", + "EnvironmentVersionsOperations", + "JobsOperations", + "LabelingJobsOperations", + "ModelContainersOperations", + "ModelVersionsOperations", + "OnlineEndpointsOperations", + "OnlineDeploymentsOperations", + "SchedulesOperations", + "WorkspaceFeaturesOperations", + "RegistriesOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py deleted file mode 100644 index dd37ffc498c3..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py +++ /dev/null @@ -1,89 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class AzureMachineLearningWorkspacesOperationsMixin: - - def list_skus( - self, - **kwargs - ) -> AsyncIterable["_models.SkuListResult"]: - """Lists all skus with associated features. 
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either SkuListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.SkuListResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.SkuListResult"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_skus.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('SkuListResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_deployments_operations.py new file mode 100644 index 000000000000..f1894a66e9e9 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_deployments_operations.py @@ -0,0 +1,888 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._batch_deployments_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class BatchDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`batch_deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.BatchDeployment"]: + """Lists Batch inference deployments in the workspace. + + Lists Batch inference deployments in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Endpoint name. Required. + :type endpoint_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Top of list. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchDeployment or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeploymentTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BatchDeploymentTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Batch Inference deployment (asynchronous). + + Delete Batch Inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference deployment identifier. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> _models.BatchDeployment: + """Gets a batch inference deployment by id. + + Gets a batch inference deployment by id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch deployments. Required. 
+ :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchDeployment or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeployment] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BatchDeployment", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, IO], + **kwargs: Any + ) -> Optional[_models.BatchDeployment]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.BatchDeployment]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + 
api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("BatchDeployment", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Is either a model type or a IO type. + Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeployment] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BatchDeployment", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.BatchDeployment, IO], + **kwargs: Any + ) -> _models.BatchDeployment: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + 
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeployment] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "BatchDeployment") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("BatchDeployment", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("BatchDeployment", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.BatchDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.BatchDeployment, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeployment] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BatchDeployment", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_endpoints_operations.py new file mode 100644 index 000000000000..31cdc28336c3 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_endpoints_operations.py @@ -0,0 +1,916 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._batch_endpoints_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_keys_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class BatchEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`batch_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.BatchEndpoint"]: + """Lists Batch inference endpoint in the workspace. + + Lists Batch inference endpoint in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchEndpoint or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpointTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BatchEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: 
ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Batch Inference Endpoint (asynchronous). + + Delete Batch Inference Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference Endpoint name. Required. + :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.BatchEndpoint: + """Gets a batch inference endpoint by name. + + Gets a batch inference endpoint by name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch Endpoint. Required. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchEndpoint or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpoint] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> Optional[_models.BatchEndpoint]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.BatchEndpoint]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithIdentity") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Is either a model type or a IO + type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpoint] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.BatchEndpoint, IO], + **kwargs: Any + ) -> _models.BatchEndpoint: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpoint] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + 
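# IO and bytes payloads are forwarded as the raw request content; model
+ # instances are serialized to JSON by the branch below.
+ 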
_content = body + else: + _json = self._serialize.body(body, "BatchEndpoint") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.BatchEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.BatchEndpoint, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpoint] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def list_keys( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: + """Lists batch Inference Endpoint keys. + + Lists batch Inference Endpoint keys. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference Endpoint name. Required. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EndpointAuthKeys] + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_containers_operations.py new file mode 100644 index 000000000000..9eb0f9f1e841 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_containers_operations.py @@ -0,0 +1,437 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._code_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class CodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`code_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.CodeContainer"]: + """List containers. + + List containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeContainer or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.CodeContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.CodeContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.CodeContainer, IO], + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_versions_operations.py new file mode 100644 index 000000000000..3a8759e1387b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_versions_operations.py @@ -0,0 +1,468 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._code_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class CodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`code_versions` attribute. 
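+
+    A minimal usage sketch, not generated code: it assumes the aio client is constructed with a
+    credential and subscription ID in the usual management-client pattern, and that the placeholder
+    resource names are replaced with real ones::
+
+        from azure.identity.aio import DefaultAzureCredential
+        from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices
+
+        async with AzureMachineLearningServices(DefaultAzureCredential(), "<subscription-id>") as client:
+            # Iterate the paged CodeVersion results for one code container.
+            async for code_version in client.code_versions.list(
+                resource_group_name="<resource-group>", workspace_name="<workspace>", name="<code-name>"
+            ):
+                print(code_version.name)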
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.CodeVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeVersion or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + 
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.CodeVersion: + """Get version. + + Get version. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.CodeVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.CodeVersion, IO], + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_containers_operations.py new file mode 100644 index 000000000000..01c93100d4dd --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_containers_operations.py @@ -0,0 +1,446 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._component_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`component_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ComponentContainer"]: + """List component containers. + + List component containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentContainer or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ComponentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ComponentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ComponentContainer, IO], + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py new file mode 100644 index 000000000000..0479ebc602c0 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py @@ -0,0 +1,473 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._component_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`component_versions` attribute. 
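+
+    A minimal usage sketch, not generated code (it assumes ``client`` is an
+    :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices` instance created as in
+    the ``code_versions`` example, and that the placeholder names refer to existing resources)::
+
+        # Fetch a single registered component version by container name and version identifier.
+        component_version = await client.component_versions.get(
+            resource_group_name="<resource-group>",
+            workspace_name="<workspace>",
+            name="<component-name>",
+            version="1",
+        )
+        print(component_version.id)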
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ComponentVersion"]: + """List component versions. + + List component versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Component name. Required. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentVersion or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return 
request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.ComponentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ComponentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. 
+ :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.ComponentVersion, IO], + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py new file mode 100644 index 000000000000..c05d1d49e34b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py @@ -0,0 +1,1625 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._compute_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_keys_request, + build_list_nodes_request, + build_list_request, + build_restart_request, + build_start_request, + build_stop_request, + build_update_custom_services_request, + build_update_idle_shutdown_setting_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ComputeOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`compute` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.ComputeResource"]: + """Gets computes in specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. 
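# Hedged sketch of consuming the paged result documented above: list() returns an
# AsyncItemPaged of ComputeResource, so the continuation ("skip") handling shown in
# prepare_request/get_next is transparent to callers. "client" is assumed to be an
# authenticated aio AzureMachineLearningServices instance; names are placeholders.
async def print_computes(client) -> None:
    async for compute in client.compute.list(
        resource_group_name="my-rg", workspace_name="my-workspace"
    ):
        properties = compute.properties
        print(compute.name, properties.compute_type if properties else None)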
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComputeResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PaginatedComputeResourcesList] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("PaginatedComputeResourcesList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.ComputeResource: + """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are + not returned - use 'keys' nested resource to get them. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComputeResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ComputeResource, IO], + **kwargs: Any + ) -> _models.ComputeResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ComputeResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ComputeResource", pipeline_response) + + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComputeResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ComputeResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
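# Hedged sketch of the long-running create flow documented above. The AmlCompute,
# AmlComputeProperties and ScaleSettings model names are assumed from this package's
# models module; the cluster definition itself is illustrative only.
from azure.mgmt.machinelearningservices import models


async def create_cpu_cluster(client) -> models.ComputeResource:
    poller = await client.compute.begin_create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        compute_name="cpu-cluster",
        parameters=models.ComputeResource(
            location="eastus",
            properties=models.AmlCompute(
                properties=models.AmlComputeProperties(
                    vm_size="STANDARD_DS3_V2",
                    scale_settings=models.ScaleSettings(max_node_count=4, min_node_count=0),
                )
            ),
        ),
    )
    # begin_* returns an AsyncLROPoller; result() waits for provisioning to complete.
    return await poller.result()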
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ComputeResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Is either a model type or + a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ComputeResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ClusterUpdateParameters, IO], + **kwargs: Any + ) -> _models.ComputeResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ClusterUpdateParameters") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComputeResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ClusterUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
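# Hedged sketch of the update flow above using a raw JSON body, which the
# implementation accepts alongside a ClusterUpdateParameters model (it routes
# bytes/streams to the request content). The payload shape below (scale settings
# nested under "properties") is assumed from the REST API rather than taken from
# this hunk; verify it against ClusterUpdateParameters before relying on it.
import json


async def resize_cluster(client) -> None:
    payload = {"properties": {"scaleSettings": {"minNodeCount": 0, "maxNodeCount": 8}}}
    poller = await client.compute.begin_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        compute_name="cpu-cluster",
        parameters=json.dumps(payload).encode("utf-8"),
        content_type="application/json",
    )
    await poller.result()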
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ClusterUpdateParameters, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Is either a model type or a IO + type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ComputeResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + 
underlying_resource_action=underlying_resource_action, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes specified Machine Learning compute. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the + underlying compute from workspace if 'Detach'. Known values are: "Delete" and "Detach". + Required. + :type underlying_resource_action: str or + ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
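# Hedged sketch of the delete flow documented above: per the docstring, "Detach"
# removes the compute from the workspace but keeps the underlying resource, while
# "Delete" removes the underlying resource as well. Names are placeholders.
async def detach_compute(client) -> None:
    poller = await client.compute.begin_delete(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        compute_name="attached-compute",
        underlying_resource_action="Detach",
    )
    await poller.result()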
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + underlying_resource_action=underlying_resource_action, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @overload + async def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: List[_models.CustomService], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Required. + :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Required. + :type custom_services: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: Union[List[_models.CustomService], IO], + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Is either a list type or a IO type. + Required. + :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
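# Hedged sketch of the call documented above. Because the supplied list overwrites
# the existing one, passing an empty list clears all custom services on the compute
# instance; to add a service, send the full desired list of CustomService models.
async def clear_custom_services(client) -> None:
    await client.compute.update_custom_services(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        compute_name="my-compute-instance",
        custom_services=[],
    )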
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(custom_services, (IO, bytes)): + _content = custom_services + else: + _json = self._serialize.body(custom_services, "[CustomService]") + + request = build_update_custom_services_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update_custom_services.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_custom_services.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices"} # type: ignore + + @distributed_trace + def list_nodes( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> AsyncIterable["_models.AmlComputeNodeInformation"]: + """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. 
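# Hedged sketch of consuming list_nodes: like list(), it returns an AsyncItemPaged,
# here of AmlComputeNodeInformation. The field names printed below are assumed from
# this package's models rather than taken from this hunk.
async def print_nodes(client) -> None:
    async for node in client.compute.list_nodes(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        compute_name="cpu-cluster",
    ):
        print(node.node_id, node.public_ip_address, node.port)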
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AmlComputeNodeInformation or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AmlComputeNodesInformation] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_nodes_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_nodes.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("AmlComputeNodesInformation", pipeline_response) + list_of_elem = deserialized.nodes + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_nodes.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes"} # type: ignore + + @distributed_trace_async + async def list_keys( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.ComputeSecrets: + """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeSecrets or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeSecrets] + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComputeSecrets", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys"} # type: ignore + + async def _start_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_start_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._start_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + + @distributed_trace_async + async def begin_start( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Posts a start action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + + async def _stop_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_stop_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._stop_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + + @distributed_trace_async + async def begin_stop( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Posts a stop action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._stop_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + + async def _restart_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_restart_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._restart_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + 
return cls(pipeline_response, None, {}) + + _restart_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + + @distributed_trace_async + async def begin_restart( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Posts a restart action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._restart_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_restart.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + + @overload + async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + 
workspace_name: str, + compute_name: str, + parameters: _models.IdleShutdownSetting, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.IdleShutdownSetting, IO], + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Is either a model type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "IdleShutdownSetting") + + request = build_update_idle_shutdown_setting_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update_idle_shutdown_setting.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_idle_shutdown_setting.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_containers_operations.py new file mode 100644 index 000000000000..f277dea315c8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_containers_operations.py @@ -0,0 +1,446 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
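A minimal usage sketch for the compute operations above (begin_stop, begin_restart, update_idle_shutdown_setting), assuming an authenticated aio client with the operation group exposed on its compute attribute; the subscription, resource group, workspace and compute names, the idle_time_before_shutdown field name, and the PT30M duration are illustrative assumptions, not values taken from this patch.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            # begin_stop/begin_restart return AsyncLROPoller[None]; await result() to block
            # until the long-running operation completes.
            stop_poller = await client.compute.begin_stop(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                compute_name="my-compute-instance",
            )
            await stop_poller.result()

            restart_poller = await client.compute.begin_restart(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                compute_name="my-compute-instance",
            )
            await restart_poller.result()

            # update_idle_shutdown_setting is a plain call (no poller); the field name and the
            # ISO 8601 duration below are assumptions for illustration.
            await client.compute.update_idle_shutdown_setting(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                compute_name="my-compute-instance",
                parameters=models.IdleShutdownSetting(idle_time_before_shutdown="PT30M"),
            )


asyncio.run(main())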
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._data_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class DataContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`data_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.DataContainer"]: + """List data containers. + + List data containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataContainer or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DataContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: Container name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.DataContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DataContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.DataContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.DataContainer, IO], + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("DataContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("DataContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py new file mode 100644 index 000000000000..2e9ca7947de8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py @@ -0,0 +1,482 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._data_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class DataVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`data_versions` attribute. 
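A usage sketch for DataContainersOperations, assuming the authenticated client from the previous sketch; the container name, the tags, and the DataContainerProperties fields (data_type, description, tags) are assumptions based on this package's models rather than values from this patch.

from azure.mgmt.machinelearningservices import models


async def manage_data_containers(client) -> None:
    # Create or update a container; the deserialized DataContainer resource is returned.
    container = await client.data_containers.create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        name="my-dataset",
        body=models.DataContainer(
            properties=models.DataContainerProperties(
                data_type="uri_folder",
                description="Raw training data",
                tags={"team": "ml"},
            )
        ),
    )
    print(container.name)

    # list() returns an AsyncItemPaged; iterate with `async for` and paging is handled for you.
    async for item in client.data_containers.list(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        list_view_type="ActiveOnly",
    ):
        print(item.name)

    # Fetch one container, then delete it.
    fetched = await client.data_containers.get("my-rg", "my-workspace", "my-dataset")
    await client.data_containers.delete("my-rg", "my-workspace", fetched.name)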
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.DataVersionBase"]: + """List data versions in the data container. + + List data versions in the data container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Data container's name. Required. + :type name: str + :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. Default + value is None. + :type order_by: str + :param top: Top count of results, top count cannot be greater than the page size. + If topCount > page size, results with be default page size count + will be returned. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataVersionBase or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataVersionBaseResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DataVersionBaseResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.DataVersionBase: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataVersionBase] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DataVersionBase", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.DataVersionBase, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.DataVersionBase, IO], + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataVersionBase] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataVersionBase") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("DataVersionBase", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("DataVersionBase", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_datastores_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_datastores_operations.py new file mode 100644 index 000000000000..a6007cd6daa4 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_datastores_operations.py @@ -0,0 +1,541 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
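A companion sketch for DataVersionsOperations, assuming the same client; the UriFolderDataVersion properties class, its data_uri field, and the azureml:// path are assumptions used only for illustration.

from azure.mgmt.machinelearningservices import models


async def manage_data_versions(client) -> None:
    # Register version "1" of the container created above; the properties are polymorphic
    # (uri_file, uri_folder, mltable) and data_uri points at the underlying storage location.
    version = await client.data_versions.create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        name="my-dataset",
        version="1",
        body=models.DataVersionBase(
            properties=models.UriFolderDataVersion(
                data_uri="azureml://datastores/workspaceblobstore/paths/raw/2022-10/",
                description="October snapshot",
            )
        ),
    )
    print(version.name)

    # List at most ten versions ordered by creation time, filtered by tag.
    async for item in client.data_versions.list(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        name="my-dataset",
        order_by="createdtime",
        top=10,
        tags="team=ml",
    ):
        print(item.name)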
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._datastores_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_list_secrets_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class DatastoresOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`datastores` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: int = 30, + is_default: Optional[bool] = None, + names: Optional[List[str]] = None, + search_text: Optional[str] = None, + order_by: Optional[str] = None, + order_by_asc: bool = False, + **kwargs: Any + ) -> AsyncIterable["_models.Datastore"]: + """List datastores. + + List datastores. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Maximum number of results to return. Default value is 30. + :type count: int + :param is_default: Filter down to the workspace default datastore. Default value is None. + :type is_default: bool + :param names: Names of datastores to return. Default value is None. + :type names: list[str] + :param search_text: Text to search for in the datastore names. Default value is None. + :type search_text: str + :param order_by: Order by property (createdtime | modifiedtime | name). Default value is None. + :type order_by: str + :param order_by_asc: Order by property in ascending order. Default value is False. 
+ :type order_by_asc: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Datastore or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Datastore] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DatastoreResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + count=count, + is_default=is_default, + names=names, + search_text=search_text, + order_by=order_by, + order_by_asc=order_by_asc, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DatastoreResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete datastore. + + Delete datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
Required. + :type workspace_name: str + :param name: Datastore name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + @distributed_trace_async + async def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.Datastore: + """Get datastore. + + Get datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Datastore] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Datastore", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Datastore, + skip_validation: bool = False, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Datastore: + """Create or update datastore. + + Create or update datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. + :type name: str + :param body: Datastore entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Datastore + :param skip_validation: Flag to skip validation. Default value is False. + :type skip_validation: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Datastore or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Datastore
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    async def create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        body: IO,
+        skip_validation: bool = False,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.Datastore:
+        """Create or update datastore.
+
+        Create or update datastore.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param name: Datastore name. Required.
+        :type name: str
+        :param body: Datastore entity to create or update. Required.
+        :type body: IO
+        :param skip_validation: Flag to skip validation. Default value is False.
+        :type skip_validation: bool
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Datastore or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Datastore
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace_async
+    async def create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        body: Union[_models.Datastore, IO],
+        skip_validation: bool = False,
+        **kwargs: Any
+    ) -> _models.Datastore:
+        """Create or update datastore.
+
+        Create or update datastore.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param name: Datastore name. Required.
+        :type name: str
+        :param body: Datastore entity to create or update. Is either a model type or an IO type.
+         Required.
+        :type body: ~azure.mgmt.machinelearningservices.models.Datastore or IO
+        :param skip_validation: Flag to skip validation. Default value is False.
+        :type skip_validation: bool
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Datastore] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Datastore") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + skip_validation=skip_validation, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("Datastore", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("Datastore", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + @distributed_trace_async + async def list_secrets( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.DatastoreSecrets: + """Get datastore secrets. + + Get datastore secrets. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatastoreSecrets or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DatastoreSecrets + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DatastoreSecrets] + + request = build_list_secrets_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_secrets.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DatastoreSecrets", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_secrets.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_containers_operations.py new file mode 100644 index 000000000000..bc2b2474592f --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_containers_operations.py @@ -0,0 +1,447 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._environment_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class EnvironmentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`environment_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.EnvironmentContainer"]: + """List environment containers. + + List environment containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentContainer or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.EnvironmentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.EnvironmentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. 
+
+        Create or update container.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param name: Container name. This is case-sensitive. Required.
+        :type name: str
+        :param body: Container entity to create or update. Required.
+        :type body: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainer or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace_async
+    async def create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        body: Union[_models.EnvironmentContainer, IO],
+        **kwargs: Any
+    ) -> _models.EnvironmentContainer:
+        """Create or update container.
+
+        Create or update container.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param name: Container name. This is case-sensitive. Required.
+        :type name: str
+        :param body: Container entity to create or update. Is either a model type or an IO type.
+         Required.
+        :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py new file mode 100644 index 000000000000..c9342cc30b3b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py @@ -0,0 +1,473 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._environment_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class EnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`environment_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.EnvironmentVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.EnvironmentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.EnvironmentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.EnvironmentVersion, IO], + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py new file mode 100644 index 000000000000..8219eba6296a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py @@ -0,0 +1,639 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._jobs_operations import ( + build_cancel_request, + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class JobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`jobs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + job_type: Optional[str] = None, + tag: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.JobBase"]: + """Lists Jobs in the workspace. + + Lists Jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param job_type: Type of job to be returned. Default value is None. + :type job_type: str + :param tag: Jobs returned will have this tag key. Default value is None. + :type tag: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param scheduled: Indicator whether the job is scheduled job. Default value is None. 
+ :type scheduled: bool + :param schedule_id: The scheduled id for listing the job triggered from. Default value is None. + :type schedule_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either JobBase or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.JobBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.JobBaseResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + job_type=job_type, + tag=tag, + list_view_type=list_view_type, + scheduled=scheduled, + schedule_id=schedule_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("JobBaseResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + 
error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes a Job (asynchronous). + + Deletes a Job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + @distributed_trace_async + async def get(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> _models.JobBase: + """Gets a Job by name/id. + + Gets a Job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. 
+        :type id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBase or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.JobBase
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.JobBase]
+
+        request = build_get_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            id=id,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self.get.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore
+
+        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize("JobBase", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"}  # type: ignore
+
+    @overload
+    async def create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        id: str,
+        body: _models.JobBase,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.JobBase:
+        """Creates and executes a Job.
+
+        Creates and executes a Job.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param id: The name and identifier for the Job. This is case-sensitive. Required.
+        :type id: str
+        :param body: Job definition object. Required.
+        :type body: ~azure.mgmt.machinelearningservices.models.JobBase
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBase or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.JobBase
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    async def create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        id: str,
+        body: IO,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.JobBase:
+        """Creates and executes a Job.
+
+        Creates and executes a Job.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param id: The name and identifier for the Job. This is case-sensitive. Required.
+        :type id: str
+        :param body: Job definition object. Required.
+        :type body: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBase or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.JobBase
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace_async
+    async def create_or_update(
+        self, resource_group_name: str, workspace_name: str, id: str, body: Union[_models.JobBase, IO], **kwargs: Any
+    ) -> _models.JobBase:
+        """Creates and executes a Job.
+
+        Creates and executes a Job.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param id: The name and identifier for the Job. This is case-sensitive. Required.
+        :type id: str
+        :param body: Job definition object. Is either a model type or an IO type. Required.
+        :type body: ~azure.mgmt.machinelearningservices.models.JobBase or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: JobBase or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.JobBase
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
+        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.JobBase]
+
+        content_type = content_type or "application/json"
+        _json = None
+        _content = None
+        if isinstance(body, (IO, bytes)):
+            _content = body
+        else:
+            _json = self._serialize.body(body, "JobBase")
+
+        request = build_create_or_update_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            id=id,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self.create_or_update.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore
+
+        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize("JobBase", pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize("JobBase", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"}  # type: ignore
+
+    async def _cancel_initial(  # pylint: disable=inconsistent-return-statements
+        self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any
+    ) -> None:
+        error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
+        cls = kwargs.pop("cls", None)  # type: ClsType[None]
+
+        request = build_cancel_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            id=id,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self._cancel_initial.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore
+
+        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    _cancel_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"}  # type: ignore
+
+    @distributed_trace_async
+    async def begin_cancel(
+        self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any
+    ) -> AsyncLROPoller[None]:
+        """Cancels a Job (asynchronous).
+
+        Cancels a Job (asynchronous).
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param id: The name and identifier for the Job. This is case-sensitive. Required.
+        :type id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
+        cls = kwargs.pop("cls", None)  # type: ClsType[None]
+        polling = kwargs.pop("polling", True)  # type: Union[bool, AsyncPollingMethod]
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._cancel_initial(  # type: ignore
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                id=id,
+                api_version=api_version,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
+                **kwargs
+            )
+        kwargs.pop("error_map", None)
+
+        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        if polling is True:
+            polling_method = cast(
+                AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
+            )  # type: AsyncPollingMethod
+        elif polling is False:
+            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+        else:
+            polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output,
+            )
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_cancel.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"}  # type: ignore
diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py
new file mode 100644
index 000000000000..e210bf43ca50
--- /dev/null
+++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py
@@ -0,0 +1,984 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from urllib.parse import parse_qs, urljoin, urlparse
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._labeling_jobs_operations import (
+    build_create_or_update_request,
+    build_delete_request,
+    build_export_labels_request,
+    build_get_request,
+    build_list_request,
+    build_pause_request,
+    build_resume_request,
+)
+
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class LabelingJobsOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s
+        :attr:`labeling_jobs` attribute.
+    """
+
+    models = _models
+
+    def __init__(self, *args, **kwargs) -> None:
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+    @distributed_trace
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skip: Optional[str] = None,
+        count: Optional[int] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.LabelingJob"]:
+        """Lists labeling jobs in the workspace.
+
+        Lists labeling jobs in the workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination. Default value is None.
+        :type skip: str
+        :param count: Number of labeling jobs to return. Default value is None.
+ :type count: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LabelingJob or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.LabelingJobResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + count=count, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("LabelingJobResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + """Delete a labeling job. + + Delete a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + id: str, + include_job_instructions: bool = False, + include_label_categories: bool = False, + **kwargs: Any + ) -> _models.LabelingJob: + """Gets a labeling job by name/id. + + Gets a labeling job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param include_job_instructions: Boolean value to indicate whether to include JobInstructions + in response. Default value is False. + :type include_job_instructions: bool + :param include_label_categories: Boolean value to indicate Whether to include LabelCategories + in response. Default value is False. 
+ :type include_label_categories: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LabelingJob or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJob + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.LabelingJob] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + include_job_instructions=include_job_instructions, + include_label_categories=include_label_categories, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> _models.LabelingJob: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.LabelingJob] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "LabelingJob") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.LabelingJob] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("LabelingJob", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + async def _export_labels_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> Optional[_models.ExportSummary]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ExportSummary]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ExportSummary") + + request = build_export_labels_request( + 
resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._export_labels_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ExportSummary", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _export_labels_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + + @overload + async def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). + + Export labels from a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). + + Export labels from a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). + + Export labels from a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ExportSummary] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._export_labels_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ExportSummary", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_export_labels.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + + @distributed_trace_async + async def pause( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + """Pause a labeling job. + + Pause a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_pause_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.pause.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + pause.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause"} # type: ignore + + async def _resume_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_resume_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._resume_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + 
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _resume_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore + + @distributed_trace_async + async def begin_resume( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Resume a labeling job (asynchronous). + + Resume a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._resume_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_resume.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore diff 
--git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_machine_learning_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_machine_learning_compute_operations.py deleted file mode 100644 index 3efc5f75d467..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_machine_learning_compute_operations.py +++ /dev/null @@ -1,895 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models as _models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class MachineLearningComputeOperations: - """MachineLearningComputeOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_workspace( - self, - resource_group_name: str, - workspace_name: str, - skiptoken: Optional[str] = None, - **kwargs - ) -> AsyncIterable["_models.PaginatedComputeResourcesList"]: - """Gets computes in specified workspace. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param skiptoken: Continuation token for pagination. 
- :type skiptoken: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedComputeResourcesList] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedComputeResourcesList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore - - async def get( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> "_models.ComputeResource": - """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are - not returned - use 'keys' nested resource to get them. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeResource, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def _create_or_update_initial( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - parameters: "_models.ComputeResource", - **kwargs - ) -> "_models.ComputeResource": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", 
api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'ComputeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if response.status_code == 201: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def begin_create_or_update( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - parameters: "_models.ComputeResource", - **kwargs - ) -> AsyncLROPoller["_models.ComputeResource"]: - """Creates or updates compute. This call will overwrite a compute if it exists. This is a - nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify - that it does not exist yet. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param parameters: Payload with Machine Learning compute definition. - :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
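The overwrite warning above implies a get-before-create pattern. A minimal sketch of that pattern, assuming an async management client that exposes this operations class as a `machine_learning_compute` attribute, placeholder resource names, and a `ComputeResource` payload built elsewhere:

    from azure.core.exceptions import ResourceNotFoundError

    async def create_compute_if_absent(client, resource_group, workspace, compute_name, compute_definition):
        """Create the compute only if it does not already exist, since the PUT silently overwrites."""
        try:
            # A 404 from GET surfaces as ResourceNotFoundError via the error_map shown above.
            return await client.machine_learning_compute.get(resource_group, workspace, compute_name)
        except ResourceNotFoundError:
            poller = await client.machine_learning_compute.begin_create_or_update(
                resource_group, workspace, compute_name, parameters=compute_definition
            )
            return await poller.result()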
- :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._create_or_update_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - parameters=parameters, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def _update_initial( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - parameters: "_models.ClusterUpdateParameters", - **kwargs - ) -> "_models.ComputeResource": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - 
header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'ClusterUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def begin_update( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - parameters: "_models.ClusterUpdateParameters", - **kwargs - ) -> AsyncLROPoller["_models.ComputeResource"]: - """Updates properties of a compute. This call will overwrite a compute if it exists. This is a - nonrecoverable operation. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param parameters: Additional parameters for cluster update. - :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._update_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - parameters=parameters, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def _delete_initial( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str') - - 
# Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - - if cls: - return cls(pipeline_response, None, response_headers) - - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def begin_delete( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], - **kwargs - ) -> AsyncLROPoller[None]: - """Deletes specified Machine Learning compute. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the - underlying compute from workspace if 'Detach'. - :type underlying_resource_action: str or ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
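A minimal sketch of the Delete/Detach switch described above, under the same client assumption and placeholder names; passing "Detach" leaves the underlying Azure resource in place, while "Delete" removes it along with the workspace reference:

    async def detach_compute(client, resource_group, workspace, compute_name):
        # underlying_resource_action accepts either the UnderlyingResourceAction enum or its string value.
        poller = await client.machine_learning_compute.begin_delete(
            resource_group, workspace, compute_name, underlying_resource_action="Detach"
        )
        await poller.result()  # resolves to None once the long-running operation completes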
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._delete_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - underlying_resource_action=underlying_resource_action, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def list_nodes( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> "_models.AmlComputeNodesInformation": - """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AmlComputeNodesInformation, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AmlComputeNodesInformation"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_nodes.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore - - async def list_keys( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> "_models.ComputeSecrets": - """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeSecrets, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeSecrets"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeSecrets', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore - - async def start( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> None: - """Posts a start action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.start.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore - - async def stop( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> None: - """Posts a stop action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.stop.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore - - async def restart( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> None: - """Posts a restart action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.restart.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_containers_operations.py new file mode 100644 index 000000000000..fbc324846fa6 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_containers_operations.py @@ -0,0 +1,450 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._model_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`model_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ModelContainer"]: + """List model containers. + + List model containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Maximum number of results to return. Default value is None. + :type count: int + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelContainer or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + count=count, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
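A minimal sketch of consuming the pager returned by the list operation above, assuming an async AzureMachineLearningServices client (whose `model_containers` attribute is this class, as noted in the class docstring) and placeholder resource names:

    async def print_model_container_names(client, resource_group, workspace):
        # The async pager follows the service's nextLink transparently;
        # list_view_type="All" also includes archived containers.
        async for container in client.model_containers.list(
            resource_group, workspace, list_view_type="All"
        ):
            print(container.name)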
+ :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ModelContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ModelContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ModelContainer, IO], + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + 
if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py new file mode 100644 index 000000000000..3831b9a2688e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py @@ -0,0 +1,499 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._model_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`model_versions` attribute. 
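Because `body` accepts either a `ModelContainer` model or an IO stream (see the overloads above), a minimal sketch of the model-typed path, assuming the same async client and the `ModelContainerProperties` type from this package's models; the field values are placeholders:

    from azure.mgmt.machinelearningservices import models

    async def register_model_container(client, resource_group, workspace, name):
        container = models.ModelContainer(
            properties=models.ModelContainerProperties(description="example container")
        )
        # An IO object holding the equivalent JSON payload could be passed instead of the model.
        return await client.model_containers.create_or_update(
            resource_group, workspace, name, body=container
        )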
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + offset: Optional[int] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + feed: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ModelVersion"]: + """List model versions. + + List model versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Model name. This is case-sensitive. Required. + :type name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param version: Model version. Default value is None. + :type version: str + :param description: Model description. Default value is None. + :type description: str + :param offset: Number of initial results to skip. Default value is None. + :type offset: int + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param properties: Comma-separated list of property names (and optionally values). Example: + prop1,prop2=value2. Default value is None. + :type properties: str + :param feed: Name of the feed. Default value is None. + :type feed: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelVersion or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + skip=skip, + order_by=order_by, + top=top, + version=version, + description=description, + offset=offset, + tags=tags, + properties=properties, + feed=feed, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. 
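Reviewer note (not part of the patch): the paged list operation generated in this hunk can be exercised with a short script like the sketch below. The client construction follows the usual mgmt-plane pattern and, together with every angle-bracketed value, is an illustrative assumption rather than anything taken from the patch.

# Illustrative sketch only; placeholder names and client construction are assumptions.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def list_model_versions() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(credential, "<subscription-id>")
    async with client, credential:
        # list() returns an AsyncItemPaged; iterate it with "async for".
        async for version in client.model_versions.list(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            name="<model-name>",
            top=10,
            list_view_type="ActiveOnly",
        ):
            print(version.name)


if __name__ == "__main__":
    asyncio.run(list_model_versions())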
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.ModelVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ModelVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
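Reviewer note (not part of the patch): the get and delete operations above pair naturally when cleaning up a specific model version. A minimal sketch follows, again assuming the usual mgmt-plane client construction and placeholder resource names.

# Illustrative sketch only; angle-bracketed names are placeholders, not values from the patch.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def get_then_delete_version() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(credential, "<subscription-id>")
    async with client, credential:
        # Fetch a single version, then remove it once it is no longer needed.
        model_version = await client.model_versions.get(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            name="<model-name>",
            version="1",
        )
        print(model_version.id)

        await client.model_versions.delete(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            name="<model-name>",
            version="1",
        )


if __name__ == "__main__":
    asyncio.run(get_then_delete_version())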
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.ModelVersion, IO], + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_notebooks_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_notebooks_operations.py deleted file mode 100644 index b1dac0e119be..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_notebooks_operations.py +++ /dev/null @@ -1,160 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
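Reviewer note (not part of the patch): the create_or_update operation that closes the _model_versions_operations.py hunk above accepts a ModelVersion body. A minimal registration sketch follows; the ModelVersionProperties import and its model_type/model_uri/description fields are assumptions based on this API version's model list and should be checked against the generated models.

# Illustrative sketch only; the ModelVersionProperties field names are assumptions to verify.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices
from azure.mgmt.machinelearningservices.models import ModelVersion, ModelVersionProperties


async def register_model_version() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(credential, "<subscription-id>")
    async with client, credential:
        body = ModelVersion(
            properties=ModelVersionProperties(  # assumed property names
                model_type="custom_model",
                model_uri="<datastore-or-storage-uri-to-model-artifacts>",
                description="Example registration",
            )
        )
        created = await client.model_versions.create_or_update(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            name="<model-name>",
            version="1",
            body=body,
        )
        print(created.name)


if __name__ == "__main__":
    asyncio.run(register_model_version())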
-# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models as _models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class NotebooksOperations: - """NotebooksOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def _prepare_initial( - self, - resource_group_name: str, - workspace_name: str, - **kwargs - ) -> Optional["_models.NotebookResourceInfo"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.NotebookResourceInfo"]] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._prepare_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - 
_prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore - - async def begin_prepare( - self, - resource_group_name: str, - workspace_name: str, - **kwargs - ) -> AsyncLROPoller["_models.NotebookResourceInfo"]: - """prepare. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.NotebookResourceInfo"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._prepare_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_deployments_operations.py new 
file mode 100644 index 000000000000..a1273ed6cd35 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_deployments_operations.py @@ -0,0 +1,1158 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._online_deployments_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_logs_request, + build_get_request, + build_list_request, + build_list_skus_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class OnlineDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`online_deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.OnlineDeployment"]: + """List Inference Endpoint Deployments. + + List Inference Endpoint Deployments. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. 
+ :type endpoint_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Top of list. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OnlineDeployment or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeploymentTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("OnlineDeploymentTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> 
None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Inference Endpoint Deployment (asynchronous). + + Delete Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> _models.OnlineDeployment: + """Get Inference Deployment Deployment. + + Get Inference Deployment Deployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. 
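Reviewer note (not part of the patch): a sketch combining the paged list operation and the begin_delete long-running operation from this file. All angle-bracketed names are placeholders; begin_delete resolves to None, so awaiting result() simply blocks until the service-side delete completes.

# Illustrative sketch only; placeholder names and client construction are assumptions.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def prune_deployments() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(credential, "<subscription-id>")
    async with client, credential:
        async for deployment in client.online_deployments.list(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            endpoint_name="<endpoint>",
        ):
            print("found deployment:", deployment.name)

        # begin_delete returns an AsyncLROPoller[None]; result() waits for completion.
        poller = await client.online_deployments.begin_delete(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            endpoint_name="<endpoint>",
            deployment_name="<deployment>",
        )
        await poller.result()


if __name__ == "__main__":
    asyncio.run(prune_deployments())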
+ :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OnlineDeployment or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeployment] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> Optional[_models.OnlineDeployment]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.OnlineDeployment]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSku") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + 
json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Is either a model type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeployment] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.OnlineDeployment, IO], + **kwargs: Any + ) -> _models.OnlineDeployment: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = 
kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeployment] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OnlineDeployment") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.OnlineDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.OnlineDeployment, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Is either a model type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeployment] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + async def 
get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.DeploymentLogsRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.DeploymentLogsRequest, IO], + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Is either a model type or a + IO type. Required. 
+ :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentLogs] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DeploymentLogsRequest") + + request = build_get_logs_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.get_logs.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DeploymentLogs", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_logs.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs"} # type: ignore + + @distributed_trace + def list_skus( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.SkuResource"]: + """List Inference Endpoint Deployment Skus. + + List Inference Endpoint Deployment Skus. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. 
+ :type deployment_name: str + :param count: Number of Skus to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.SkuResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SkuResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_skus_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + api_version=api_version, + template_url=self.list_skus.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_skus.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_endpoints_operations.py 
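# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# The async OnlineDeployments operations defined above expose get_logs and
# list_skus.  A minimal sketch of driving them is shown here.  Assumptions not
# taken from this diff: the client constructor signature (credential,
# subscription_id), the operation-group attribute name `online_deployments`,
# and all placeholder resource names.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices
from azure.mgmt.machinelearningservices import models


async def show_deployment_logs_and_skus() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(
        credential=credential, subscription_id="<subscription-id>"
    )

    # get_logs accepts a DeploymentLogsRequest model or a binary IO stream; the
    # model's individual fields are not shown in this diff, so defaults are used.
    logs = await client.online_deployments.get_logs(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        endpoint_name="<endpoint>",
        deployment_name="<deployment>",
        body=models.DeploymentLogsRequest(),
    )
    print(logs)

    # list_skus pages through SkuResource items for the deployment.
    async for sku in client.online_deployments.list_skus(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        endpoint_name="<endpoint>",
        deployment_name="<deployment>",
    ):
        print(sku)

    await client.close()
    await credential.close()


if __name__ == "__main__":
    asyncio.run(show_deployment_logs_and_skus())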
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_endpoints_operations.py new file mode 100644 index 000000000000..0bb9becebd1b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_endpoints_operations.py @@ -0,0 +1,1243 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._online_endpoints_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_get_token_request, + build_list_keys_request, + build_list_request, + build_regenerate_keys_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class OnlineEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`online_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: Optional[str] = None, + count: Optional[int] = None, + compute_type: Optional[Union[str, _models.EndpointComputeType]] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.OnlineEndpoint"]: + """List Online Endpoints. + + List Online Endpoints. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of the endpoint. Default value is None. + :type name: str + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. + :type count: int + :param compute_type: EndpointComputeType to be filtered by. Known values are: "Managed", + "Kubernetes", and "AzureMLCompute". Default value is None. + :type compute_type: str or ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. + :type properties: str + :param order_by: The option to order the response. Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. + :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OnlineEndpoint or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpointTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + name=name, + count=count, + compute_type=compute_type, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = 
self._deserialize("OnlineEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Online 
Endpoint (asynchronous). + + Delete Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.OnlineEndpoint: + """Get Online Endpoint. + + Get Online Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. 
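# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# The list / get / begin_delete operations above page through OnlineEndpoint
# resources and drive the delete LRO.  A minimal sketch, assuming `client` is an
# AzureMachineLearningServices instance built as in the earlier deployments
# sketch; the filter value and resource names are placeholders.
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def list_get_delete_endpoints(client: AzureMachineLearningServices) -> None:
    # list() returns an AsyncItemPaged; order_by takes the OrderString values
    # documented above (for example "CreatedAtDesc").
    async for endpoint in client.online_endpoints.list(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        order_by="CreatedAtDesc",
    ):
        print(endpoint.name)

    # Read one endpoint, then delete it and wait for the LRO to complete.
    endpoint = await client.online_endpoints.get(
        "<resource-group>", "<workspace>", "<endpoint>"
    )
    print(endpoint.id)

    poller = await client.online_endpoints.begin_delete(
        "<resource-group>", "<workspace>", "<endpoint>"
    )
    await poller.result()  # begin_delete's poller yields None on completion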
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OnlineEndpoint or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpoint] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> Optional[_models.OnlineEndpoint]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.OnlineEndpoint]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithIdentity") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + 
request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Is either a model type or a IO + type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpoint] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.OnlineEndpoint, IO], + **kwargs: Any + ) -> _models.OnlineEndpoint: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpoint] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OnlineEndpoint") + + request = 
build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.OnlineEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.OnlineEndpoint, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Is either a model type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpoint] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def list_keys( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: + """List EndpointAuthKeys for an Endpoint using Key-based authentication. + + List EndpointAuthKeys for an Endpoint using Key-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. 
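# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# begin_create_or_update and begin_update above accept either the model type
# (OnlineEndpoint / PartialMinimalTrackedResourceWithIdentity) or a binary IO
# stream serialized as "application/json".  This sketch uses the IO overload so
# no model field names have to be assumed; the JSON file names and the `client`
# construction are placeholders.
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def create_then_patch_endpoint(client: AzureMachineLearningServices) -> None:
    # Full create-or-update: the payload follows the OnlineEndpoint schema.
    with open("online_endpoint.json", "rb") as body:
        poller = await client.online_endpoints.begin_create_or_update(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            endpoint_name="<endpoint>",
            body=body,
        )
    endpoint = await poller.result()
    print(endpoint)

    # Partial update: the payload follows the
    # PartialMinimalTrackedResourceWithIdentity schema.
    with open("endpoint_patch.json", "rb") as body:
        poller = await client.online_endpoints.begin_update(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            endpoint_name="<endpoint>",
            body=body,
        )
    endpoint = await poller.result()
    print(endpoint)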
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EndpointAuthKeys] + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys"} # type: ignore + + async def _regenerate_keys_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "RegenerateEndpointKeysRequest") + + request = build_regenerate_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._regenerate_keys_initial.metadata["url"], + 
headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _regenerate_keys_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + + @overload + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.RegenerateEndpointKeysRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._regenerate_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_regenerate_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + + @distributed_trace_async + async def get_token( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthToken: + """Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication. + + Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthToken or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthToken + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EndpointAuthToken] + + request = build_get_token_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_token.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EndpointAuthToken", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_token.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py index 6b2ed5f29d08..99c67d82df68 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,82 +6,101 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
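For reviewers of the regenerated online-endpoint operations above, a minimal usage sketch of the async client follows. It assumes the client class produced by this code generation (AzureMachineLearningServices) plus an online_endpoints operation-group attribute, placeholder resource names, and a RegenerateEndpointKeysRequest field name (key_type) that is not spelled out in this diff.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices
from azure.mgmt.machinelearningservices.models import RegenerateEndpointKeysRequest


async def rotate_and_fetch_token() -> None:
    credential = DefaultAzureCredential()
    # Placeholder subscription and resource names; substitute real values.
    client = AzureMachineLearningServices(credential, "<subscription-id>")
    async with client, credential:
        # Kick off the long-running key regeneration; the poller resolves to None on success.
        poller = await client.online_endpoints.begin_regenerate_keys(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            endpoint_name="<key-auth-endpoint>",
            body=RegenerateEndpointKeysRequest(key_type="Primary"),  # field name assumed
        )
        await poller.result()

        # For AMLToken-authenticated endpoints, fetch a short-lived token instead.
        token = await client.online_endpoints.get_token(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            endpoint_name="<aml-token-endpoint>",
        )
        print(token.access_token)  # attribute name assumed on EndpointAuthToken


if __name__ == "__main__":
    asyncio.run(rotate_and_fetch_token())

Both calls map onto the begin_regenerate_keys and get_token coroutines generated above; begin_regenerate_keys follows the location-based LRO polling configured in its AsyncARMPolling setup.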
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._vendor import _convert_request +from ...operations._operations import build_list_request -T = TypeVar('T') +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class Operations: - """Operations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`operations` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def list( - self, - **kwargs - ) -> AsyncIterable["_models.OperationListResult"]: - """Lists all of the available Azure Machine Learning Workspaces REST API operations. + @distributed_trace + def list(self, **kwargs: Any) -> AsyncIterable["_models.AmlOperation"]: + """Lists all of the available Azure Machine Learning Services REST API operations. 
:keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OperationListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either AmlOperation or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.AmlOperation] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AmlOperationListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - request = self._client.get(url, query_parameters, header_parameters) + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResult', pipeline_response) + deserialized = self._deserialize("AmlOperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -89,17 +109,18 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = 
self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/providers/Microsoft.MachineLearningServices/operations"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py index 5ef7c0591ff1..ca5b36b23560 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,292 +6,419 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
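The rewritten Operations.list above returns an AsyncItemPaged of AmlOperation models and re-applies the client's api-version when following next links. A hedged consumption sketch (same assumed client construction as the earlier example; the operations attribute name comes from the generated class docstring, and the printed field name is assumed):

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def show_available_operations() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(credential, "<subscription-id>")
    async with client, credential:
        # AsyncItemPaged drives the prepare_request/get_next plumbing shown above,
        # so callers only iterate; no manual next-link handling is needed.
        async for operation in client.operations.list():
            print(operation.name)  # AmlOperation field name assumed


if __name__ == "__main__":
    asyncio.run(show_available_operations())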
# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models - -T = TypeVar('T') +from ..._vendor import _convert_request +from ...operations._private_endpoint_connections_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class PrivateEndpointConnectionsOperations: - """PrivateEndpointConnectionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`private_endpoint_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointConnection"]: + """List all the private endpoint connections associated with the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnection or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionListResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections"} # type: ignore + + @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - private_endpoint_connection_name: str, - **kwargs - ) -> "_models.PrivateEndpointConnection": + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Gets the specified private endpoint connection associated with the workspace. 
- :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. + with the workspace. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) + :return: PrivateEndpointConnection or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, 
response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore - async def put( + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + + @overload + async def create_or_update( self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - properties: "_models.PrivateEndpointConnection", - **kwargs - ) -> "_models.PrivateEndpointConnection": + properties: _models.PrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Update the state of specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. + with the workspace. Required. :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. + :param properties: The private endpoint connection properties. Required. :type properties: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) + :return: PrivateEndpointConnection or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.put.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(properties, 'PrivateEndpointConnection') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + properties: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the workspace. - return deserialized - put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Required. + :type properties: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ - async def _delete_initial( + @distributed_trace_async + async def create_or_update( self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] + properties: Union[_models.PrivateEndpointConnection, IO], + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Is either a model type or a IO + type. Required. + :type properties: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection] - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(properties, (IO, bytes)): + _content = properties + else: + _json = self._serialize.body(properties, "PrivateEndpointConnection") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 202, 204]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + return deserialized - async def begin_delete( - self, - resource_group_name: str, - workspace_name: str, - private_endpoint_connection_name: str, - **kwargs - ) -> AsyncLROPoller[None]: + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> None: """Deletes the specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. + with the workspace. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] + :return: None or the result of cls(response) + :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._delete_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - private_endpoint_connection_name=private_endpoint_connection_name, - cls=lambda x,y,z: x, - **kwargs - ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) + response = pipeline_response.http_response - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py index 882726b29838..e4a9c4e2e249 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,95 +6,108 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._vendor import _convert_request +from ...operations._private_link_resources_operations import build_list_request -T = TypeVar('T') +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class PrivateLinkResourcesOperations: - """PrivateLinkResourcesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class PrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`private_link_resources` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def list_by_workspace( - self, - resource_group_name: str, - workspace_name: str, - **kwargs - ) -> "_models.PrivateLinkResourceListResult": + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def list( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.PrivateLinkResourceListResult: """Gets the private link resources that need to be created for a workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateLinkResourceListResult, or the result of cls(response) + :return: PrivateLinkResourceListResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateLinkResourceListResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourceListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateLinkResourceListResult] + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response) + deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_quotas_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_quotas_operations.py index 6bdae569077f..db5214ae622f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_quotas_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_quotas_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,153 +6,224 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
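Taken together, the regenerated private_endpoint_connections and private_link_resources groups above cover workspace private-networking inspection. A hedged sketch of the read paths (operation-group attribute names from the generated class docstrings; resource names are placeholders; list-result field names assumed from the deserialization calls above):

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def inspect_private_networking() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(credential, "<subscription-id>")
    async with client, credential:
        # Page through the workspace's existing private endpoint connections.
        async for connection in client.private_endpoint_connections.list(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
        ):
            print(connection.name)

        # private_link_resources.list is a plain async call (no pager): it returns
        # a single PrivateLinkResourceListResult whose value holds the resources.
        result = await client.private_link_resources.list(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
        )
        for resource in result.value or []:
            print(resource.name)


if __name__ == "__main__":
    asyncio.run(inspect_private_networking())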
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._vendor import _convert_request +from ...operations._quotas_operations import build_list_request, build_update_request -T = TypeVar('T') +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class QuotasOperations: - """QuotasOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class QuotasOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`quotas` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @overload async def update( self, location: str, - parameters: "_models.QuotaUpdateParameters", - **kwargs - ) -> "_models.UpdateWorkspaceQuotasResult": + parameters: _models.QuotaUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: """Update quota for each VM family in workspace. - :param location: The location for update quota is queried. + :param location: The location for update quota is queried. Required. :type location: str - :param parameters: Quota update parameters. + :param parameters: Quota update parameters. Required. 
:type parameters: ~azure.mgmt.machinelearningservices.models.QuotaUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, location: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. Required. + :type location: str + :param parameters: Quota update parameters. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, location: str, parameters: Union[_models.QuotaUpdateParameters, IO], **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. Required. + :type location: str + :param parameters: Quota update parameters. Is either a model type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.QuotaUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: UpdateWorkspaceQuotasResult, or the result of cls(response) + :return: UpdateWorkspaceQuotasResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UpdateWorkspaceQuotasResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'QuotaUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.UpdateWorkspaceQuotasResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "QuotaUpdateParameters") + + request = build_update_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, 
response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response) + deserialized = self._deserialize("UpdateWorkspaceQuotasResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore - def list( - self, - location: str, - **kwargs - ) -> AsyncIterable["_models.ListWorkspaceQuotas"]: + update.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas"} # type: ignore + + @distributed_trace + def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.ResourceQuota"]: """Gets the currently assigned Workspace Quotas based on VMFamily. - :param location: The location for which resource usage is queried. + :param location: The location for which resource usage is queried. Required. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListWorkspaceQuotas] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either ResourceQuota or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ResourceQuota] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceQuotas"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListWorkspaceQuotas] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + 
headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response) + deserialized = self._deserialize("ListWorkspaceQuotas", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -160,16 +232,18 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'} # type: ignore + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registries_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registries_operations.py new file mode 100644 index 000000000000..78ef4987b36d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registries_operations.py @@ -0,0 +1,717 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
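# A minimal usage sketch for the reworked async QuotasOperations, assuming the
# AzureMachineLearningServices aio client and azure-identity are available; the
# subscription id, location, and the QuotaUpdateParameters / ResourceQuota field
# names below are illustrative assumptions, not taken from this patch.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices
from azure.mgmt.machinelearningservices import models


async def show_quotas() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            # list() now yields ResourceQuota items through AsyncItemPaged
            async for quota in client.quotas.list("westus2"):
                print(quota.id, quota.limit)
            # update() accepts either a QuotaUpdateParameters model or a raw bytes/IO body;
            # the empty value list is only a placeholder payload
            result = await client.quotas.update("westus2", models.QuotaUpdateParameters(value=[]))
            print(result)


asyncio.run(show_quotas())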
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._registries_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_subscription_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RegistriesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registries` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_by_subscription(self, skip: Optional[str] = None, **kwargs: Any) -> AsyncIterable["_models.Registry"]: + """List registries by subscription. + + List registries by subscription. + + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Registry or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.RegistryTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("RegistryTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/registries"} # type: ignore + + @distributed_trace + def list( + self, resource_group_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.Registry"]: + """List registries. + + List registries. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Registry or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.RegistryTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("RegistryTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, **kwargs: Any + ) -> None: + """Delete registry. + + Delete registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. 
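# Consuming the AsyncItemPaged returned by list()/list_by_subscription(): a short
# sketch, assuming `client` is an AzureMachineLearningServices aio client as in the
# earlier sketch; resource names are placeholders.
async def show_registries(client) -> None:
    async for registry in client.registries.list_by_subscription():
        print(registry.name, registry.location)
    # by_page() exposes the raw pages (and accepts a continuation token) when callers
    # need page-level control instead of a flat async iterator.
    async for page in client.registries.list(resource_group_name="<resource-group>").by_page():
        async for registry in page:
            print(registry.id)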
+ :type registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + + @distributed_trace_async + async def get(self, resource_group_name: str, registry_name: str, **kwargs: Any) -> _models.Registry: + """Get registry. + + Get registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. 
+ :type registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Registry] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Registry", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + + @overload + async def update( + self, + resource_group_name: str, + registry_name: str, + body: _models.PartialRegistryPartialTrackedResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. 
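# Error-handling sketch: 4xx responses are routed through error_map, so a missing
# registry surfaces as ResourceNotFoundError (a subclass of HttpResponseError), while
# other failures carry the deserialized ErrorResponse on exc.model. `client` is assumed
# as in the earlier sketches.
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError


async def get_registry_safely(client):
    try:
        return await client.registries.get("<resource-group>", "<registry-name>")
    except ResourceNotFoundError:
        return None
    except HttpResponseError as exc:
        print(exc.status_code, exc.message)
        raise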
+ :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + registry_name: str, + body: Union[_models.PartialRegistryPartialTrackedResource, IO], + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Registry] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialRegistryPartialTrackedResource") + + request = build_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Registry", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, 
{}) + + return deserialized + + update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + + async def _create_or_update_initial( + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> _models.Registry: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Registry] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Registry") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("Registry", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("Registry", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + body: _models.Registry, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Registry + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
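# The new overload pattern in a sketch: update() (like the other body-taking operations)
# accepts either a typed model or a pre-serialized JSON payload. `client` is assumed to be
# an AzureMachineLearningServices aio client, and the `tags` field on
# PartialRegistryPartialTrackedResource is an assumption used only for illustration.
import json

from azure.mgmt.machinelearningservices import models


async def patch_registry_tags(client) -> None:
    # model route: the SDK serializes the partial resource to JSON
    updated = await client.registries.update(
        "<resource-group>",
        "<registry-name>",
        models.PartialRegistryPartialTrackedResource(tags={"env": "dev"}),
    )
    # bytes/stream route: the caller supplies the JSON body and, optionally, the content type
    updated = await client.registries.update(
        "<resource-group>",
        "<registry-name>",
        json.dumps({"tags": {"env": "dev"}}).encode("utf-8"),
        content_type="application/json",
    )
    print(updated.tags)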
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Registry or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Registry or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> AsyncLROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.Registry or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Registry or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Registry] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("Registry", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_code_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_code_containers_operations.py new file mode 100644 index 000000000000..268e903c67f3 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_code_containers_operations.py @@ -0,0 +1,594 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
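# Long-running create_or_update in a sketch: begin_create_or_update() returns an
# AsyncLROPoller whose continuation token can be persisted and used to resume polling.
# `client` is assumed to be an AzureMachineLearningServices aio client and
# `registry_body` a models.Registry instance; both are placeholders.
async def create_registry(client, registry_body) -> None:
    poller = await client.registries.begin_create_or_update(
        "<resource-group>", "<registry-name>", registry_body
    )
    token = poller.continuation_token()   # can be stored, e.g. across process restarts
    registry = await poller.result()      # waits for the operation to reach a terminal state
    print(registry.id)

    # resuming from a saved token rebuilds the poller without re-issuing the initial request
    resumed = await client.registries.begin_create_or_update(
        "<resource-group>", "<registry-name>", registry_body, continuation_token=token
    )
    registry = await resumed.result()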
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._registry_code_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RegistryCodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_code_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, registry_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.CodeContainer"]: + """List containers. + + List containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeContainer or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any + ) -> _models.CodeContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. 
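# Polling can be switched off per call: with polling=False, begin_delete() wraps the
# initial response in an AsyncNoPolling poller, so result() returns as soon as the
# service accepts the request. A small sketch; `client` is assumed as above.
async def delete_code_container(client) -> None:
    poller = await client.registry_code_containers.begin_delete(
        "<resource-group>", "<registry-name>", "<code-name>", polling=False
    )
    await poller.result()   # returns after the initial response; deletion continues server-side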
+ :type code_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: Union[_models.CodeContainer, IO], + **kwargs: Any + ) -> _models.CodeContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: 
ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: _models.CodeContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: Union[_models.CodeContainer, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CodeContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_code_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_code_versions_operations.py new file mode 100644 index 000000000000..0d9309864497 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_code_versions_operations.py @@ -0,0 +1,628 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
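A minimal usage sketch for the registry code container operations generated in the hunk above. This is not part of the patch: it assumes the async management client is built with the usual credential/subscription_id arguments, that it exposes this operations class as a registry_code_containers attribute, and that CodeContainerProperties carries the container metadata; all resource names are placeholders.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices
from azure.mgmt.machinelearningservices import models as _models


async def main() -> None:
    # Placeholder identifiers; replace with real values.
    subscription_id = "<subscription-id>"
    resource_group = "<resource-group>"
    registry = "<registry-name>"

    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, subscription_id) as client:
            # begin_create_or_update is a long-running operation; result() yields the CodeContainer.
            poller = await client.registry_code_containers.begin_create_or_update(
                resource_group_name=resource_group,
                registry_name=registry,
                code_name="my-code",
                body=_models.CodeContainer(
                    # CodeContainerProperties and its fields are assumed model shapes.
                    properties=_models.CodeContainerProperties(description="example container")
                ),
            )
            created = await poller.result()

            # get() returns the deserialized CodeContainer directly.
            fetched = await client.registry_code_containers.get(
                resource_group_name=resource_group,
                registry_name=registry,
                code_name="my-code",
            )
            print(created.name, fetched.id)


asyncio.run(main())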
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._registry_code_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RegistryCodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_code_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.CodeVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeVersion or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + 
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> _models.CodeVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: Union[_models.CodeVersion, IO], + **kwargs: Any + ) -> _models.CodeVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: _models.CodeVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either CodeVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CodeVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: Union[_models.CodeVersion, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CodeVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CodeVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_containers_operations.py new file mode 100644 index 000000000000..97f99ba3b7e4 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_containers_operations.py @@ -0,0 +1,594 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
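A hedged sketch of how the registry code version operations defined above might be called. The registry_code_versions attribute comes from the class docstring in the hunk; the client construction, the order_by expression, and the placeholder names are assumptions, not part of the generated code.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def main() -> None:
    subscription_id = "<subscription-id>"  # placeholder
    resource_group = "<resource-group>"    # placeholder
    registry = "<registry-name>"           # placeholder

    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, subscription_id) as client:
            # list() returns an AsyncItemPaged; iterate with `async for`.
            async for version in client.registry_code_versions.list(
                resource_group_name=resource_group,
                registry_name=registry,
                code_name="my-code",
                order_by="createdtime desc",  # assumed ordering expression
                top=10,
            ):
                print(version.name)

            # get() fetches a single CodeVersion by container name and version identifier.
            v1 = await client.registry_code_versions.get(
                resource_group_name=resource_group,
                registry_name=registry,
                code_name="my-code",
                version="1",
            )
            print(v1.id)

            # begin_delete() is a long-running operation; await the poller to completion.
            poller = await client.registry_code_versions.begin_delete(
                resource_group_name=resource_group,
                registry_name=registry,
                code_name="my-code",
                version="1",
            )
            await poller.result()


asyncio.run(main())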
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._registry_component_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RegistryComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_component_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, registry_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.ComponentContainer"]: + """List containers. + + List containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentContainer or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> _models.ComponentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. 
+ :type component_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: Union[_models.ComponentContainer, IO], + **kwargs: Any + ) -> _models.ComponentContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: _models.ComponentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ComponentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComponentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: Union[_models.ComponentContainer, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComponentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ComponentContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_versions_operations.py new file mode 100644 index 000000000000..28bbc7912528 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_versions_operations.py @@ -0,0 +1,628 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
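In the same spirit, a minimal sketch for the registry component container operations above. The registry_component_containers attribute is named in the class docstring; ComponentContainerProperties, the client construction, and the resource names are assumptions for illustration only.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices
from azure.mgmt.machinelearningservices import models as _models


async def main() -> None:
    subscription_id = "<subscription-id>"  # placeholder
    resource_group = "<resource-group>"    # placeholder
    registry = "<registry-name>"           # placeholder

    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, subscription_id) as client:
            # Create or update a component container (LRO), then read the result.
            poller = await client.registry_component_containers.begin_create_or_update(
                resource_group_name=resource_group,
                registry_name=registry,
                component_name="my-component",
                body=_models.ComponentContainer(
                    # ComponentContainerProperties is an assumed model shape.
                    properties=_models.ComponentContainerProperties(description="example")
                ),
            )
            container = await poller.result()
            print(container.name)

            # Page through all component containers in the registry.
            async for item in client.registry_component_containers.list(
                resource_group_name=resource_group,
                registry_name=registry,
            ):
                print(item.name)


asyncio.run(main())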
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._registry_component_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RegistryComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_component_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ComponentVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentVersion or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> _models.ComponentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: Union[_models.ComponentVersion, IO], + **kwargs: Any + ) -> _models.ComponentVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + 
headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: _models.ComponentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: Union[_models.ComponentVersion, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ComponentVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_containers_operations.py new file mode 100644 index 000000000000..b60a08413875 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_containers_operations.py @@ -0,0 +1,604 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
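# --- Editorial usage sketch (not part of the generated patch) ---------------
# A small example of working with the RegistryComponentVersionsOperations class
# defined above: page through versions, fetch one, then delete it through the
# LRO poller. The attribute name ``registry_component_versions`` matches the
# class docstring in this diff; the subscription, resource group, registry,
# component, and version values are placeholder assumptions.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def walk_component_versions() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            ops = client.registry_component_versions

            # list() returns an AsyncItemPaged; iterate it with ``async for``.
            async for version in ops.list(
                resource_group_name="my-resource-group",
                registry_name="my-registry",
                component_name="my-component",
                top=10,
            ):
                print(version.name)

            # get() is a plain coroutine returning a ComponentVersion.
            fetched = await ops.get(
                resource_group_name="my-resource-group",
                registry_name="my-registry",
                component_name="my-component",
                version="1",
            )
            print(fetched.id)

            # begin_delete() returns an AsyncLROPoller[None]; wait for completion.
            poller = await ops.begin_delete(
                resource_group_name="my-resource-group",
                registry_name="my-registry",
                component_name="my-component",
                version="1",
            )
            await poller.result()


asyncio.run(walk_component_versions())
# ---------------------------------------------------------------------------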
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._registry_environment_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RegistryEnvironmentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_environment_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.EnvironmentContainer"]: + """List environment containers. + + List environment containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentContainer or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> _models.EnvironmentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. This is case-sensitive. Required. 
+ :type environment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: Union[_models.EnvironmentContainer, IO], + **kwargs: Any + ) -> _models.EnvironmentContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + 
params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: _models.EnvironmentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either EnvironmentContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EnvironmentContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: Union[_models.EnvironmentContainer, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EnvironmentContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_versions_operations.py new file mode 100644 index 000000000000..1c3f2f9b85b5 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_versions_operations.py @@ -0,0 +1,633 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
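# --- Editorial usage sketch (not part of the generated patch) ---------------
# A hypothetical example for the RegistryEnvironmentContainersOperations class
# defined above: create or update an environment container, then read it back.
# The attribute name ``registry_environment_containers`` matches the class
# docstring in this diff; the resource names, the polling interval, and the
# EnvironmentContainerProperties construction are placeholder assumptions.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices


async def upsert_environment_container() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            ops = client.registry_environment_containers

            poller = await ops.begin_create_or_update(
                resource_group_name="my-resource-group",
                registry_name="my-registry",
                environment_name="my-environment",
                body=models.EnvironmentContainer(
                    properties=models.EnvironmentContainerProperties(
                        description="example environment container"
                    )
                ),
                # Seconds between polls when the service sends no Retry-After header.
                polling_interval=5,
            )
            created = await poller.result()

            # Read the container back; get() is a plain coroutine.
            fetched = await ops.get(
                resource_group_name="my-resource-group",
                registry_name="my-registry",
                environment_name=created.name,
            )
            print(fetched.id)


asyncio.run(upsert_environment_container())
# ---------------------------------------------------------------------------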
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._registry_environment_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RegistryEnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_environment_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.EnvironmentVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. This is case-sensitive. Required. + :type environment_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + 
error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> _models.EnvironmentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. This is case-sensitive. Required. + :type environment_name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: Union[_models.EnvironmentVersion, IO], + **kwargs: Any + ) -> _models.EnvironmentVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: _models.EnvironmentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EnvironmentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either EnvironmentVersion or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ registry_name: str,
+ environment_name: str,
+ version: str,
+ body: IO,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.EnvironmentVersion]:
+ """Create or update version.
+
+ Create or update version.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param registry_name: Name of Azure Machine Learning registry. Required.
+ :type registry_name: str
+ :param environment_name: Container name. Required.
+ :type environment_name: str
+ :param version: Version identifier. Required.
+ :type version: str
+ :param body: Version entity to create or update. Required.
+ :type body: IO
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+ this operation to not poll, or pass in your own initialized polling object for a personal
+ polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either EnvironmentVersion or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ registry_name: str,
+ environment_name: str,
+ version: str,
+ body: Union[_models.EnvironmentVersion, IO],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.EnvironmentVersion]:
+ """Create or update version.
+
+ Create or update version.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param registry_name: Name of Azure Machine Learning registry. Required.
+ :type registry_name: str
+ :param environment_name: Container name. Required.
+ :type environment_name: str
+ :param version: Version identifier. Required.
+ :type version: str
+ :param body: Version entity to create or update. Is either a model type or an IO type. Required.
+ :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion or IO
+ :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+ Default value is None.
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EnvironmentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_containers_operations.py new file mode 100644 index 000000000000..41306d8704fd --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_containers_operations.py @@ -0,0 +1,603 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._registry_model_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RegistryModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_model_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ModelContainer"]: + """List model containers. + + List model containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelContainer or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> _models.ModelContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. 
+ :type model_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: Union[_models.ModelContainer, IO], + **kwargs: Any + ) -> _models.ModelContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: _models.ModelContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ModelContainer]: + """Create or update model container. + + Create or update model container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ModelContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ModelContainer]: + """Create or update model container. + + Create or update model container. 
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param registry_name: Name of Azure Machine Learning registry. Required.
+ :type registry_name: str
+ :param model_name: Container name. Required.
+ :type model_name: str
+ :param body: Container entity to create or update. Required.
+ :type body: IO
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+ this operation to not poll, or pass in your own initialized polling object for a personal
+ polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ModelContainer or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ registry_name: str,
+ model_name: str,
+ body: Union[_models.ModelContainer, IO],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.ModelContainer]:
+ """Create or update model container.
+
+ Create or update model container.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param registry_name: Name of Azure Machine Learning registry. Required.
+ :type registry_name: str
+ :param model_name: Container name. Required.
+ :type model_name: str
+ :param body: Container entity to create or update. Is either a model type or an IO type.
+ Required.
+ :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer or IO
+ :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+ Default value is None.
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+ this operation to not poll, or pass in your own initialized polling object for a personal
+ polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ModelContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ModelContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_versions_operations.py new file mode 100644 index 000000000000..5d2cddcdf133 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_versions_operations.py @@ -0,0 +1,651 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._registry_model_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class RegistryModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_model_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ModelVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param version: Version identifier. Default value is None. + :type version: str + :param description: Model description. Default value is None. + :type description: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. 
+ :type tags: str + :param properties: Comma-separated list of property names (and optionally values). Example: + prop1,prop2=value2. Default value is None. + :type properties: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelVersion or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + subscription_id=self._config.subscription_id, + skip=skip, + order_by=order_by, + top=top, + version=version, + description=description, + tags=tags, + properties=properties, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + + @distributed_trace_async + async def get( + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> _models.ModelVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.ModelVersion, IO], + **kwargs: Any + ) -> _models.ModelVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: _models.ModelVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ModelVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ModelVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.ModelVersion, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ModelVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ModelVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_schedules_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_schedules_operations.py new file mode 100644 index 000000000000..87cf39535dea --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_schedules_operations.py @@ -0,0 +1,582 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
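# Editorial aside, not part of the generated patch: a minimal, hedged usage sketch for the
# async registry model-version operations added above. The client attribute name
# `registry_model_versions` and all resource names/IDs below are assumptions for illustration.
#
# from azure.identity.aio import DefaultAzureCredential
# from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices
#
# async def manage_registry_model_version() -> None:
#     async with AzureMachineLearningServices(DefaultAzureCredential(), "<subscription-id>") as client:
#         # Read a specific model version from a registry.
#         mv = await client.registry_model_versions.get("<rg>", "<registry>", "<model>", "1")
#         # Start the long-running delete and wait for it to finish.
#         poller = await client.registry_model_versions.begin_delete("<rg>", "<registry>", "<model>", "1")
#         await poller.result()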
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._schedules_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class SchedulesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`schedules` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ScheduleListViewType]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.Schedule"]: + """List schedules in specified workspace. + + List schedules in specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: Status filter for schedule. Known values are: "EnabledOnly", + "DisabledOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Schedule or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ScheduleResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ScheduleResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or 
{}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete schedule. + + Delete schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + @distributed_trace_async + async def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.Schedule: + """Get schedule. + + Get schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Schedule or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Schedule + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Schedule] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Schedule", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + async def _create_or_update_initial( + self, resource_group_name: str, workspace_name: str, name: str, body: Union[_models.Schedule, IO], **kwargs: Any + ) -> _models.Schedule: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Schedule] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Schedule") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: 
ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("Schedule", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("Schedule", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Schedule, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :param body: Schedule definition. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Schedule + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Schedule or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :param body: Schedule definition. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Schedule or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, name: str, body: Union[_models.Schedule, IO], **kwargs: Any + ) -> AsyncLROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :param body: Schedule definition. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Schedule or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Schedule or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Schedule] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("Schedule", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_usages_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_usages_operations.py index b5a6eb14f798..c3f93307f2fb 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_usages_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_usages_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,91 +6,106 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
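# Editorial aside, not part of the generated patch: a hedged sketch of how the schedules
# operations defined in the new file above might be called. The `schedules` attribute name
# comes from the class docstring; the workspace names and the Schedule body are placeholders.
#
# async def upsert_and_list_schedules(client, schedule_body) -> None:
#     # Create or update a schedule via the LRO poller, then wait for the result.
#     poller = await client.schedules.begin_create_or_update(
#         resource_group_name="<rg>", workspace_name="<workspace>", name="nightly-retrain",
#         body=schedule_body,  # a ~azure.mgmt.machinelearningservices.models.Schedule
#     )
#     schedule = await poller.result()
#     # Page through all schedules in the workspace.
#     async for item in client.schedules.list("<rg>", "<workspace>"):
#         print(item.name)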
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._vendor import _convert_request +from ...operations._usages_operations import build_list_request -T = TypeVar('T') +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class UsagesOperations: - """UsagesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class UsagesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`usages` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - location: str, - **kwargs - ) -> AsyncIterable["_models.ListUsagesResult"]: + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.Usage"]: """Gets the current usage information as well as limits for AML resources for given subscription and location. - :param location: The location for which resource usage is queried. + :param location: The location for which resource usage is queried. Required. 
:type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListUsagesResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListUsagesResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Usage or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Usage] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListUsagesResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListUsagesResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ListUsagesResult', pipeline_response) + deserialized = self._deserialize("ListUsagesResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -98,16 +114,18 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - 
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py index 109eba1351e8..578d6868865a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,101 +6,102 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._vendor import _convert_request +from ...operations._virtual_machine_sizes_operations import build_list_request -T = TypeVar('T') +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class VirtualMachineSizesOperations: - """VirtualMachineSizesOperations async operations. - You should not instantiate this class directly. 
Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class VirtualMachineSizesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`virtual_machine_sizes` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def list( - self, - location: str, - compute_type: Optional[str] = None, - recommended: Optional[bool] = None, - **kwargs - ) -> "_models.VirtualMachineSizeListResult": + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def list(self, location: str, **kwargs: Any) -> _models.VirtualMachineSizeListResult: """Returns supported VM Sizes in a location. - :param location: The location upon which virtual-machine-sizes is queried. + :param location: The location upon which virtual-machine-sizes is queried. Required. :type location: str - :param compute_type: Type of compute to filter by. - :type compute_type: str - :param recommended: Specifies whether to return recommended vm sizes or all vm sizes. 
- :type recommended: bool :keyword callable cls: A custom type or function that will be passed the direct response - :return: VirtualMachineSizeListResult, or the result of cls(response) + :return: VirtualMachineSizeListResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineSizeListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if compute_type is not None: - query_parameters['compute-type'] = self._serialize.query("compute_type", compute_type, 'str') - if recommended is not None: - query_parameters['recommended'] = self._serialize.query("recommended", recommended, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachineSizeListResult] + + request = build_list_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) + deserialized = self._deserialize("VirtualMachineSizeListResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list.metadata = {'url': 
'/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore + + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py index c01237741478..10c4541fbd5a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,317 +6,431 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - -T = TypeVar('T') +from ..._vendor import _convert_request +from ...operations._workspace_connections_operations import ( + build_create_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class WorkspaceConnectionsOperations: - """WorkspaceConnectionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class WorkspaceConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
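# Editorial aside, not part of the generated patch: a brief, hedged sketch of the refactored
# usages and virtual-machine-sizes calls above. The `usages` and `virtual_machine_sizes`
# attribute names follow the class docstrings; the location and printed fields are assumptions.
#
# async def report_capacity(client, location: str = "westus2") -> None:
#     # Usage quotas are returned as an async pageable.
#     async for usage in client.usages.list(location):
#         print(usage.name, usage.current_value, usage.limit)
#     # Supported VM sizes come back as a single result object.
#     sizes = await client.virtual_machine_sizes.list(location)
#     print(len(sizes.value or []))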
+ Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`workspace_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def list( + @overload + async def create( self, resource_group_name: str, workspace_name: str, - target: Optional[str] = None, - category: Optional[str] = None, - **kwargs - ) -> AsyncIterable["_models.PaginatedWorkspaceConnectionsList"]: - """List all connections under a AML workspace. - - :param resource_group_name: Name of the resource group in which workspace is located. + connection_name: str, + parameters: _models.WorkspaceConnectionPropertiesV2BasicResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """create. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param target: Target of the workspace connection. - :type target: str - :param category: Category of the workspace connection. - :type category: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param parameters: The object for creating or updating a new workspace connection. Required. + :type parameters: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedWorkspaceConnectionsList] - :raises: ~azure.core.exceptions.HttpResponseError + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedWorkspaceConnectionsList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if target is not None: - query_parameters['target'] = self._serialize.query("target", target, 'str') - if category is not None: - query_parameters['category'] = self._serialize.query("category", category, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore + @overload + async def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """create. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param parameters: The object for creating or updating a new workspace connection. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + @distributed_trace_async async def create( self, resource_group_name: str, workspace_name: str, connection_name: str, - parameters: "_models.WorkspaceConnectionDto", - **kwargs - ) -> "_models.WorkspaceConnection": - """Add a new workspace connection. + parameters: Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO], + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """create. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. - :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionDto + :param parameters: The object for creating or updating a new workspace connection. Is either a + model type or a IO type. Required. + :type parameters: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
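# --- Hedged usage sketch (illustrative only, not part of the generated patch) ---
# Sketches a call to the "create" overload documented here, passing a model body
# rather than a raw IO payload. The NoneAuthTypeWorkspaceConnectionProperties
# subtype and every resource name below are placeholder assumptions; "client" is
# assumed to be the async AzureMachineLearningServices client from the earlier sketch.
from azure.mgmt.machinelearningservices import models


async def create_connection(client) -> models.WorkspaceConnectionPropertiesV2BasicResource:
    body = models.WorkspaceConnectionPropertiesV2BasicResource(
        properties=models.NoneAuthTypeWorkspaceConnectionProperties(
            category="ContainerRegistry",
            target="https://myregistry.azurecr.io",
        )
    )
    return await client.workspace_connections.create(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        connection_name="my-acr-connection",
        parameters=body,
    )
# --- end sketch ---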
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnection, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection - :raises: ~azure.core.exceptions.HttpResponseError + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "WorkspaceConnectionPropertiesV2BasicResource") + + request = build_create_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) 
# type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + + @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - connection_name: str, - **kwargs - ) -> "_models.WorkspaceConnection": - """Get the detail of a workspace connection. + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """get. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. 
:type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnection, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection - :raises: ~azure.core.exceptions.HttpResponseError + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) 
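# --- Hedged usage sketch (illustrative only, not part of the generated patch) ---
# Reads a single connection back through the regenerated "get" operation. "client"
# is assumed to be the async AzureMachineLearningServices client; names are placeholders.
async def show_connection(client) -> None:
    connection = await client.workspace_connections.get(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        connection_name="my-acr-connection",
    )
    # The returned WorkspaceConnectionPropertiesV2BasicResource carries the typed
    # connection details (category, target, auth type) under .properties.
    print(connection.name, connection.properties.category)
# --- end sketch ---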
return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore - async def delete( - self, - resource_group_name: str, - workspace_name: str, - connection_name: str, - **kwargs + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any ) -> None: - """Delete a workspace connection. + """delete. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters 
= {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.WorkspaceConnectionPropertiesV2BasicResource"]: + """list. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param target: Target of the workspace connection. Default value is None. + :type target: str + :param category: Category of the workspace connection. Default value is None. 
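# --- Hedged usage sketch (illustrative only, not part of the generated patch) ---
# Combines the pageable "list" operation documented here with the "delete" operation
# above: connections are streamed with async iteration and a stale one is removed.
# The category filter value and all resource names are placeholder assumptions.
async def prune_connections(client) -> None:
    async for connection in client.workspace_connections.list(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        category="ContainerRegistry",
    ):
        if connection.name == "stale-connection":
            await client.workspace_connections.delete(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                connection_name=connection.name,
            )
# --- end sketch ---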
+ :type category: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceConnectionPropertiesV2BasicResource or + the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop( + "cls", None + ) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + target=target, + category=category, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize( + "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response + ) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_features_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_features_operations.py index 019e736e719f..cf335083f9e1 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_features_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_features_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,94 +6,111 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models +from ..._vendor import _convert_request +from ...operations._workspace_features_operations import build_list_request -T = TypeVar('T') +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class WorkspaceFeaturesOperations: - """WorkspaceFeaturesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class WorkspaceFeaturesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`workspace_features` attribute. 
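# --- Hedged usage sketch (illustrative only, not part of the generated patch) ---
# As the docstring says, this operation group is reached through the client's
# workspace_features attribute rather than instantiated directly. Resource names
# are placeholders, and the AmlUserFeature attribute names are assumptions.
async def print_enabled_features(client) -> None:
    async for feature in client.workspace_features.list(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
    ):
        print(feature.id, feature.display_name)
# --- end sketch ---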
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace def list( - self, - resource_group_name: str, - workspace_name: str, - **kwargs - ) -> AsyncIterable["_models.ListAmlUserFeatureResult"]: + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterable["_models.AmlUserFeature"]: """Lists all enabled features for a workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListAmlUserFeatureResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either AmlUserFeature or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.AmlUserFeature] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAmlUserFeatureResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListAmlUserFeatureResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = 
self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response) + deserialized = self._deserialize("ListAmlUserFeatureResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -101,17 +119,18 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py index 75d2153d645f..267893a89fb7 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,462 +6,734 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models - -T = TypeVar('T') +from ..._vendor import _convert_request +from ...operations._workspaces_operations import ( + build_create_or_update_request, + build_delete_request, + build_diagnose_request, + build_get_request, + build_list_by_resource_group_request, + build_list_by_subscription_request, + build_list_keys_request, + build_list_notebook_access_token_request, + build_list_notebook_keys_request, + build_list_outbound_network_dependencies_endpoints_request, + build_list_storage_account_keys_request, + build_prepare_notebook_request, + build_resync_keys_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class WorkspacesOperations: - """WorkspacesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`workspaces` attribute. 
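# --- Hedged usage sketch (illustrative only, not part of the generated patch) ---
# Fetches a workspace through the client's workspaces attribute referenced in the
# docstring below. "client" is assumed to be the async AzureMachineLearningServices
# client from the earlier sketch; resource names are placeholders.
async def show_workspace(client) -> None:
    workspace = await client.workspaces.get(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
    )
    print(workspace.name, workspace.location, workspace.provisioning_state)
# --- end sketch ---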
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def get( - self, - resource_group_name: str, - workspace_name: str, - **kwargs - ) -> "_models.Workspace": + @distributed_trace_async + async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: """Gets the properties of the specified machine learning workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) + :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Workspace - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 
'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore async def _create_or_update_initial( - self, - resource_group_name: str, - workspace_name: str, - parameters: "_models.Workspace", - **kwargs - ) -> Optional["_models.Workspace"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'Workspace') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = 
kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Workspace]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "Workspace") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response - if response.status_code not in [200, 201, 202]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: - deserialized = self._deserialize('Workspace', pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + @overload async def begin_create_or_update( self, resource_group_name: str, workspace_name: str, - parameters: "_models.Workspace", - **kwargs - ) -> AsyncLROPoller["_models.Workspace"]: + parameters: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: """Creates or updates a workspace with the specified parameters. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :param parameters: The parameters for creating or updating a machine learning workspace. + Required. :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameters for creating or updating a machine learning workspace. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
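# --- Hedged usage sketch (illustrative only, not part of the generated patch) ---
# Drives begin_create_or_update through its AsyncLROPoller. Only location and
# friendly_name are set here; a real create also references dependent resources
# (storage account, key vault, Application Insights), which are omitted as assumptions.
from azure.mgmt.machinelearningservices import models


async def ensure_workspace(client) -> models.Workspace:
    poller = await client.workspaces.begin_create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        parameters=models.Workspace(location="eastus", friendly_name="demo workspace"),
    )
    # result() waits for the long-running operation and returns the deserialized Workspace.
    return await poller.result()
# --- end sketch ---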
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameters for creating or updating a machine learning workspace. Is + either a model type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._create_or_update_initial( + raw_result = await self._create_or_update_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, parameters=parameters, - cls=lambda x,y,z: x, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('Workspace', pipeline_response) - + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - async def _delete_initial( - self, - resource_group_name: str, - workspace_name: str, - **kwargs + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - async def begin_delete( - self, - resource_group_name: str, - workspace_name: str, - **kwargs - ) -> AsyncLROPoller[None]: + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> AsyncLROPoller[None]: """Deletes a machine learning workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, - cls=lambda x,y,z: x, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) + kwargs.pop("error_map", None) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - async def update( + async def _update_initial( self, resource_group_name: str, workspace_name: str, - parameters: "_models.WorkspaceUpdateParameters", - **kwargs - ) -> "_models.Workspace": - """Updates a machine learning workspace with the specified parameters. 
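A matching sketch for the delete LRO above, which yields None; the same assumptions about the client class name and the placeholder resource names apply.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices  # class name assumed


async def delete_workspace() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            poller = await client.workspaces.begin_delete("my-rg", "my-ml-workspace")
            # This poller produces no body; awaiting result() simply waits for completion.
            await poller.result()


if __name__ == "__main__":
    asyncio.run(delete_workspace())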
- - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param parameters: The parameters for updating a machine learning workspace. - :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.Workspace - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] + parameters: Union[_models.WorkspaceUpdateParameters, IO], + **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Workspace]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "WorkspaceUpdateParameters") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore - def list_by_resource_group( + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: _models.WorkspaceUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameters for updating a machine learning workspace. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( self, resource_group_name: str, - skiptoken: Optional[str] = None, - **kwargs - ) -> AsyncIterable["_models.WorkspaceListResult"]: + workspace_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameters for updating a machine learning workspace. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: Union[_models.WorkspaceUpdateParameters, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameters for updating a machine learning workspace. Is either a model + type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("Workspace", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.Workspace"]: """Lists all the available machine learning workspaces under the specified resource group. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. Default value is None. 
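The PATCH path is now a begin_update LRO that accepts either a WorkspaceUpdateParameters model or a raw IO/bytes JSON body. Below is a hedged sketch of the model-based overload; the client class and resource names are still assumptions, and no WorkspaceUpdateParameters fields are set because none are shown in this patch.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices  # class name assumed


async def update_workspace() -> models.Workspace:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            update = models.WorkspaceUpdateParameters()  # populate the fields you want to change
            poller = await client.workspaces.begin_update("my-rg", "my-ml-workspace", update)
            return await poller.result()


if __name__ == "__main__":
    asyncio.run(update_workspace())

Supplying a bytes or stream payload with content_type="application/json" would exercise the IO overload documented above instead.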
+ :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", 
pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -469,187 +742,486 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data + return AsyncItemPaged(get_next, extract_data) + + list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + + async def _diagnose_initial( + self, + resource_group_name: str, + workspace_name: str, + parameters: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + **kwargs: Any + ) -> Optional[_models.DiagnoseResponseResult]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.DiagnoseResponseResult]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + if parameters is not None: + _json = self._serialize.body(parameters, "DiagnoseWorkspaceParameters") + else: + _json = None + + request = build_diagnose_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._diagnose_initial.metadata["url"], + headers=_headers, + params=_params, ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - async def list_keys( + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + 
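list_by_resource_group is a paged operation (AsyncItemPaged of Workspace) and is not awaited itself; a small sketch follows, again with an assumed client class name and a placeholder resource group.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices  # class name assumed


async def print_workspaces_in_group() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            # The pager follows the service nextLink automatically; the optional `skip`
            # continuation token documented above is omitted here.
            async for workspace in client.workspaces.list_by_resource_group("my-rg"):
                print(workspace.name)


if __name__ == "__main__":
    asyncio.run(print_workspaces_in_group())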
response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("DiagnoseResponseResult", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _diagnose_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + + @overload + async def begin_diagnose( + self, + resource_group_name: str, + workspace_name: str, + parameters: Optional[_models.DiagnoseWorkspaceParameters] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameter of diagnosing workspace health. Default value is None. + :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DiagnoseResponseResult or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_diagnose( self, resource_group_name: str, workspace_name: str, - **kwargs - ) -> "_models.ListWorkspaceKeysResult": + parameters: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameter of diagnosing workspace health. Default value is None. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DiagnoseResponseResult or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_diagnose( + self, + resource_group_name: str, + workspace_name: str, + parameters: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + **kwargs: Any + ) -> AsyncLROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameter of diagnosing workspace health. Is either a model type or a IO + type. Default value is None. + :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either DiagnoseResponseResult or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DiagnoseResponseResult] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._diagnose_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DiagnoseResponseResult", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_diagnose.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + + @distributed_trace_async + async def list_keys( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListWorkspaceKeysResult: """Lists all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
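begin_diagnose is a new LRO whose body (DiagnoseWorkspaceParameters) is optional; the sketch below omits it rather than guess at its fields. Client class and resource names remain placeholders.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices  # class name assumed


async def diagnose_workspace() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            # parameters defaults to None; polling finishes via the Location header,
            # matching the lro_options={"final-state-via": "location"} set above.
            poller = await client.workspaces.begin_diagnose("my-rg", "my-ml-workspace")
            result = await poller.result()  # DiagnoseResponseResult
            print(result)


if __name__ == "__main__":
    asyncio.run(diagnose_workspace())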
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListWorkspaceKeysResult, or the result of cls(response) + :return: ListWorkspaceKeysResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ListWorkspaceKeysResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceKeysResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListWorkspaceKeysResult] + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response) + deserialized = self._deserialize("ListWorkspaceKeysResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore - async def resync_keys( - self, - 
resource_group_name: str, - workspace_name: str, - **kwargs - ) -> None: - """Resync all the keys associated with this workspace. This includes keys for the storage account, - app insights and password for container registry. + list_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys"} # type: ignore - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] + async def _resync_keys_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.resync_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_resync_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._resync_keys_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = 
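list_keys stays a plain async call returning ListWorkspaceKeysResult; a sketch under the usual assumptions. The result's attributes are not accessed because they are not shown in this patch.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices  # class name assumed


async def show_workspace_keys() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            keys = await client.workspaces.list_keys("my-rg", "my-ml-workspace")
            print(type(keys).__name__)  # ListWorkspaceKeysResult


if __name__ == "__main__":
    asyncio.run(show_workspace_keys())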
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore + _resync_keys_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore - def list_by_subscription( - self, - skiptoken: Optional[str] = None, - **kwargs - ) -> AsyncIterable["_models.WorkspaceListResult"]: + @distributed_trace_async + async def begin_resync_keys( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Resync all the keys associated with this workspace. This includes keys for the storage account, + app insights and password for container registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._resync_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_resync_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore + + @distributed_trace + def list_by_subscription(self, skip: Optional[str] = None, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: """Lists all the available machine learning workspaces under the specified subscription. - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. Default value is None. 
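resync_keys has been promoted to a long-running begin_resync_keys, so callers that previously awaited the call directly now need to drain the poller, roughly as below (same placeholder names and assumed client class).

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices  # class name assumed


async def resync_workspace_keys() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            poller = await client.workspaces.begin_resync_keys("my-rg", "my-ml-workspace")
            await poller.result()  # completes with None once the keys are resynced


if __name__ == "__main__":
    asyncio.run(resync_workspace_keys())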
+ :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_subscription.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -658,17 +1230,391 @@ async def 
extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data + return AsyncItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + + @distributed_trace_async + async def list_notebook_access_token( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.NotebookAccessTokenResult: + """return notebook access token and refresh token. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NotebookAccessTokenResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.NotebookAccessTokenResult] + + request = build_list_notebook_access_token_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_notebook_access_token.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs ) - list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("NotebookAccessTokenResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + 
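The subscription-wide listing mirrors the resource-group pager; a brief sketch under the same assumptions.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices  # class name assumed


async def count_workspaces_in_subscription() -> int:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            count = 0
            async for _workspace in client.workspaces.list_by_subscription():
                count += 1
            return count


if __name__ == "__main__":
    print(asyncio.run(count_workspaces_in_subscription()))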
list_notebook_access_token.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken"} # type: ignore + + async def _prepare_notebook_initial( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Optional[_models.NotebookResourceInfo]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.NotebookResourceInfo]] + + request = build_prepare_notebook_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._prepare_notebook_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("NotebookResourceInfo", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _prepare_notebook_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore + + @distributed_trace_async + async def begin_prepare_notebook( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncLROPoller[_models.NotebookResourceInfo]: + """Prepare a notebook. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.NotebookResourceInfo] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._prepare_notebook_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("NotebookResourceInfo", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_prepare_notebook.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore + + @distributed_trace_async + async def list_storage_account_keys( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListStorageAccountKeysResult: + """List storage account keys of a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
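Combining the two notebook operations above: prepare the notebook resource (an LRO) and then fetch an access token. The client class and resource names are still assumptions, and the returned models are only echoed rather than dereferenced.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningServices  # class name assumed


async def prepare_notebook_and_get_token() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            poller = await client.workspaces.begin_prepare_notebook("my-rg", "my-ml-workspace")
            info = await poller.result()  # NotebookResourceInfo
            token = await client.workspaces.list_notebook_access_token("my-rg", "my-ml-workspace")
            print(type(info).__name__, type(token).__name__)


if __name__ == "__main__":
    asyncio.run(prepare_notebook_and_get_token())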
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListStorageAccountKeysResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListStorageAccountKeysResult] + + request = build_list_storage_account_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_storage_account_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ListStorageAccountKeysResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_storage_account_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys"} # type: ignore + + @distributed_trace_async + async def list_notebook_keys( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListNotebookKeysResult: + """List keys of a notebook. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListNotebookKeysResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListNotebookKeysResult] + + request = build_list_notebook_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_notebook_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ListNotebookKeysResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_notebook_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys"} # type: ignore + + @distributed_trace_async + async def list_outbound_network_dependencies_endpoints( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ExternalFQDNResponse: + """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. + + Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExternalFQDNResponse or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ExternalFQDNResponse] + + request = build_list_outbound_network_dependencies_endpoints_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ExternalFQDNResponse", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_outbound_network_dependencies_endpoints.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py index 05c48b7f14c0..ae7c9598050b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py @@ -6,340 +6,1140 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -try: - from ._models_py3 import AKS - from ._models_py3 import AKSProperties - from ._models_py3 import AksComputeSecrets - from ._models_py3 import AksNetworkingConfiguration - from ._models_py3 import AmlCompute - from ._models_py3 import AmlComputeNodeInformation - from ._models_py3 import AmlComputeNodesInformation - from ._models_py3 import AmlComputeProperties - from ._models_py3 import AmlUserFeature - from ._models_py3 import ClusterUpdateParameters - from ._models_py3 import ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties - from ._models_py3 import Compute - from ._models_py3 import ComputeInstance - from ._models_py3 import ComputeInstanceApplication - from ._models_py3 import ComputeInstanceConnectivityEndpoints - from ._models_py3 import ComputeInstanceCreatedBy - from ._models_py3 import ComputeInstanceLastOperation - from ._models_py3 import ComputeInstanceProperties - from ._models_py3 import ComputeInstanceSshSettings - from ._models_py3 import ComputeNodesInformation - from ._models_py3 import ComputeResource - from ._models_py3 import ComputeSecrets - from ._models_py3 import DataFactory - from ._models_py3 import DataLakeAnalytics - from ._models_py3 import DataLakeAnalyticsProperties - from ._models_py3 import Databricks - from ._models_py3 import DatabricksComputeSecrets - from ._models_py3 import DatabricksProperties - from ._models_py3 import EncryptionProperty - from ._models_py3 import ErrorDetail - from ._models_py3 import ErrorResponse - from ._models_py3 import EstimatedVMPrice - from ._models_py3 import EstimatedVMPrices - from ._models_py3 import HDInsight - from ._models_py3 import HDInsightProperties - from ._models_py3 import Identity - from ._models_py3 import KeyVaultProperties - from ._models_py3 import ListAmlUserFeatureResult - from ._models_py3 import ListUsagesResult - from ._models_py3 import ListWorkspaceKeysResult - from ._models_py3 import ListWorkspaceQuotas - from ._models_py3 import MachineLearningServiceError - from ._models_py3 import NodeStateCounts - from ._models_py3 import NotebookListCredentialsResult - from ._models_py3 import NotebookPreparationError - from ._models_py3 import NotebookResourceInfo - from ._models_py3 import Operation - from ._models_py3 import OperationDisplay - from ._models_py3 import OperationListResult - from ._models_py3 import PaginatedComputeResourcesList - from ._models_py3 import PaginatedWorkspaceConnectionsList - from ._models_py3 import Password - from ._models_py3 import PrivateEndpoint - from ._models_py3 import PrivateEndpointConnection - from ._models_py3 import PrivateLinkResource - from ._models_py3 import PrivateLinkResourceListResult - from ._models_py3 import PrivateLinkServiceConnectionState - from ._models_py3 import QuotaBaseProperties - from ._models_py3 import QuotaUpdateParameters - from ._models_py3 import RegistryListCredentialsResult - from ._models_py3 import Resource - from ._models_py3 import ResourceId - from ._models_py3 import ResourceName - from ._models_py3 import ResourceQuota - from ._models_py3 import ResourceSkuLocationInfo - from ._models_py3 import ResourceSkuZoneDetails - from ._models_py3 import Restriction - from ._models_py3 import SKUCapability - from ._models_py3 import ScaleSettings - from ._models_py3 import ServicePrincipalCredentials - from ._models_py3 import SharedPrivateLinkResource - from ._models_py3 import Sku - from ._models_py3 import 
SkuListResult - from ._models_py3 import SkuSettings - from ._models_py3 import SslConfiguration - from ._models_py3 import SystemService - from ._models_py3 import UpdateWorkspaceQuotas - from ._models_py3 import UpdateWorkspaceQuotasResult - from ._models_py3 import Usage - from ._models_py3 import UsageName - from ._models_py3 import UserAccountCredentials - from ._models_py3 import VirtualMachine - from ._models_py3 import VirtualMachineProperties - from ._models_py3 import VirtualMachineSecrets - from ._models_py3 import VirtualMachineSize - from ._models_py3 import VirtualMachineSizeListResult - from ._models_py3 import VirtualMachineSshCredentials - from ._models_py3 import Workspace - from ._models_py3 import WorkspaceConnection - from ._models_py3 import WorkspaceConnectionDto - from ._models_py3 import WorkspaceListResult - from ._models_py3 import WorkspaceSku - from ._models_py3 import WorkspaceUpdateParameters -except (SyntaxError, ImportError): - from ._models import AKS # type: ignore - from ._models import AKSProperties # type: ignore - from ._models import AksComputeSecrets # type: ignore - from ._models import AksNetworkingConfiguration # type: ignore - from ._models import AmlCompute # type: ignore - from ._models import AmlComputeNodeInformation # type: ignore - from ._models import AmlComputeNodesInformation # type: ignore - from ._models import AmlComputeProperties # type: ignore - from ._models import AmlUserFeature # type: ignore - from ._models import ClusterUpdateParameters # type: ignore - from ._models import ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties # type: ignore - from ._models import Compute # type: ignore - from ._models import ComputeInstance # type: ignore - from ._models import ComputeInstanceApplication # type: ignore - from ._models import ComputeInstanceConnectivityEndpoints # type: ignore - from ._models import ComputeInstanceCreatedBy # type: ignore - from ._models import ComputeInstanceLastOperation # type: ignore - from ._models import ComputeInstanceProperties # type: ignore - from ._models import ComputeInstanceSshSettings # type: ignore - from ._models import ComputeNodesInformation # type: ignore - from ._models import ComputeResource # type: ignore - from ._models import ComputeSecrets # type: ignore - from ._models import DataFactory # type: ignore - from ._models import DataLakeAnalytics # type: ignore - from ._models import DataLakeAnalyticsProperties # type: ignore - from ._models import Databricks # type: ignore - from ._models import DatabricksComputeSecrets # type: ignore - from ._models import DatabricksProperties # type: ignore - from ._models import EncryptionProperty # type: ignore - from ._models import ErrorDetail # type: ignore - from ._models import ErrorResponse # type: ignore - from ._models import EstimatedVMPrice # type: ignore - from ._models import EstimatedVMPrices # type: ignore - from ._models import HDInsight # type: ignore - from ._models import HDInsightProperties # type: ignore - from ._models import Identity # type: ignore - from ._models import KeyVaultProperties # type: ignore - from ._models import ListAmlUserFeatureResult # type: ignore - from ._models import ListUsagesResult # type: ignore - from ._models import ListWorkspaceKeysResult # type: ignore - from ._models import ListWorkspaceQuotas # type: ignore - from ._models import MachineLearningServiceError # type: ignore - from ._models import NodeStateCounts # type: ignore - from ._models import 
NotebookListCredentialsResult # type: ignore - from ._models import NotebookPreparationError # type: ignore - from ._models import NotebookResourceInfo # type: ignore - from ._models import Operation # type: ignore - from ._models import OperationDisplay # type: ignore - from ._models import OperationListResult # type: ignore - from ._models import PaginatedComputeResourcesList # type: ignore - from ._models import PaginatedWorkspaceConnectionsList # type: ignore - from ._models import Password # type: ignore - from ._models import PrivateEndpoint # type: ignore - from ._models import PrivateEndpointConnection # type: ignore - from ._models import PrivateLinkResource # type: ignore - from ._models import PrivateLinkResourceListResult # type: ignore - from ._models import PrivateLinkServiceConnectionState # type: ignore - from ._models import QuotaBaseProperties # type: ignore - from ._models import QuotaUpdateParameters # type: ignore - from ._models import RegistryListCredentialsResult # type: ignore - from ._models import Resource # type: ignore - from ._models import ResourceId # type: ignore - from ._models import ResourceName # type: ignore - from ._models import ResourceQuota # type: ignore - from ._models import ResourceSkuLocationInfo # type: ignore - from ._models import ResourceSkuZoneDetails # type: ignore - from ._models import Restriction # type: ignore - from ._models import SKUCapability # type: ignore - from ._models import ScaleSettings # type: ignore - from ._models import ServicePrincipalCredentials # type: ignore - from ._models import SharedPrivateLinkResource # type: ignore - from ._models import Sku # type: ignore - from ._models import SkuListResult # type: ignore - from ._models import SkuSettings # type: ignore - from ._models import SslConfiguration # type: ignore - from ._models import SystemService # type: ignore - from ._models import UpdateWorkspaceQuotas # type: ignore - from ._models import UpdateWorkspaceQuotasResult # type: ignore - from ._models import Usage # type: ignore - from ._models import UsageName # type: ignore - from ._models import UserAccountCredentials # type: ignore - from ._models import VirtualMachine # type: ignore - from ._models import VirtualMachineProperties # type: ignore - from ._models import VirtualMachineSecrets # type: ignore - from ._models import VirtualMachineSize # type: ignore - from ._models import VirtualMachineSizeListResult # type: ignore - from ._models import VirtualMachineSshCredentials # type: ignore - from ._models import Workspace # type: ignore - from ._models import WorkspaceConnection # type: ignore - from ._models import WorkspaceConnectionDto # type: ignore - from ._models import WorkspaceListResult # type: ignore - from ._models import WorkspaceSku # type: ignore - from ._models import WorkspaceUpdateParameters # type: ignore +from ._models_py3 import AKS +from ._models_py3 import AKSSchema +from ._models_py3 import AKSSchemaProperties +from ._models_py3 import AccessKeyAuthTypeWorkspaceConnectionProperties +from ._models_py3 import AccountKeyDatastoreCredentials +from ._models_py3 import AccountKeyDatastoreSecrets +from ._models_py3 import AcrDetails +from ._models_py3 import AksComputeSecrets +from ._models_py3 import AksComputeSecretsProperties +from ._models_py3 import AksNetworkingConfiguration +from ._models_py3 import AllNodes +from ._models_py3 import AmlCompute +from ._models_py3 import AmlComputeNodeInformation +from ._models_py3 import AmlComputeNodesInformation +from ._models_py3 import 
AmlComputeProperties +from ._models_py3 import AmlComputeSchema +from ._models_py3 import AmlOperation +from ._models_py3 import AmlOperationDisplay +from ._models_py3 import AmlOperationListResult +from ._models_py3 import AmlToken +from ._models_py3 import AmlUserFeature +from ._models_py3 import ArmResourceId +from ._models_py3 import AssetBase +from ._models_py3 import AssetContainer +from ._models_py3 import AssetJobInput +from ._models_py3 import AssetJobOutput +from ._models_py3 import AssetReferenceBase +from ._models_py3 import AssignedUser +from ._models_py3 import AutoForecastHorizon +from ._models_py3 import AutoMLJob +from ._models_py3 import AutoMLVertical +from ._models_py3 import AutoNCrossValidations +from ._models_py3 import AutoPauseProperties +from ._models_py3 import AutoScaleProperties +from ._models_py3 import AutoSeasonality +from ._models_py3 import AutoTargetLags +from ._models_py3 import AutoTargetRollingWindowSize +from ._models_py3 import AutologgerSettings +from ._models_py3 import AzureBlobDatastore +from ._models_py3 import AzureDataLakeGen1Datastore +from ._models_py3 import AzureDataLakeGen2Datastore +from ._models_py3 import AzureDatastore +from ._models_py3 import AzureFileDatastore +from ._models_py3 import BanditPolicy +from ._models_py3 import BatchDeployment +from ._models_py3 import BatchDeploymentProperties +from ._models_py3 import BatchDeploymentTrackedResourceArmPaginatedResult +from ._models_py3 import BatchEndpoint +from ._models_py3 import BatchEndpointDefaults +from ._models_py3 import BatchEndpointProperties +from ._models_py3 import BatchEndpointTrackedResourceArmPaginatedResult +from ._models_py3 import BatchRetrySettings +from ._models_py3 import BayesianSamplingAlgorithm +from ._models_py3 import BindOptions +from ._models_py3 import BuildContext +from ._models_py3 import CertificateDatastoreCredentials +from ._models_py3 import CertificateDatastoreSecrets +from ._models_py3 import Classification +from ._models_py3 import ClassificationTrainingSettings +from ._models_py3 import ClusterUpdateParameters +from ._models_py3 import CocoExportSummary +from ._models_py3 import CodeConfiguration +from ._models_py3 import CodeContainer +from ._models_py3 import CodeContainerProperties +from ._models_py3 import CodeContainerResourceArmPaginatedResult +from ._models_py3 import CodeVersion +from ._models_py3 import CodeVersionProperties +from ._models_py3 import CodeVersionResourceArmPaginatedResult +from ._models_py3 import ColumnTransformer +from ._models_py3 import CommandJob +from ._models_py3 import CommandJobLimits +from ._models_py3 import ComponentContainer +from ._models_py3 import ComponentContainerProperties +from ._models_py3 import ComponentContainerResourceArmPaginatedResult +from ._models_py3 import ComponentVersion +from ._models_py3 import ComponentVersionProperties +from ._models_py3 import ComponentVersionResourceArmPaginatedResult +from ._models_py3 import Compute +from ._models_py3 import ComputeInstance +from ._models_py3 import ComputeInstanceApplication +from ._models_py3 import ComputeInstanceAutologgerSettings +from ._models_py3 import ComputeInstanceConnectivityEndpoints +from ._models_py3 import ComputeInstanceContainer +from ._models_py3 import ComputeInstanceCreatedBy +from ._models_py3 import ComputeInstanceDataDisk +from ._models_py3 import ComputeInstanceDataMount +from ._models_py3 import ComputeInstanceEnvironmentInfo +from ._models_py3 import ComputeInstanceLastOperation +from ._models_py3 import 
ComputeInstanceProperties +from ._models_py3 import ComputeInstanceSchema +from ._models_py3 import ComputeInstanceSshSettings +from ._models_py3 import ComputeInstanceVersion +from ._models_py3 import ComputeResource +from ._models_py3 import ComputeResourceSchema +from ._models_py3 import ComputeSchedules +from ._models_py3 import ComputeSecrets +from ._models_py3 import ComputeStartStopSchedule +from ._models_py3 import ContainerResourceRequirements +from ._models_py3 import ContainerResourceSettings +from ._models_py3 import CosmosDbSettings +from ._models_py3 import Cron +from ._models_py3 import CronTrigger +from ._models_py3 import CsvExportSummary +from ._models_py3 import CustomForecastHorizon +from ._models_py3 import CustomModelJobInput +from ._models_py3 import CustomModelJobOutput +from ._models_py3 import CustomNCrossValidations +from ._models_py3 import CustomSeasonality +from ._models_py3 import CustomService +from ._models_py3 import CustomTargetLags +from ._models_py3 import CustomTargetRollingWindowSize +from ._models_py3 import DataContainer +from ._models_py3 import DataContainerProperties +from ._models_py3 import DataContainerResourceArmPaginatedResult +from ._models_py3 import DataFactory +from ._models_py3 import DataLakeAnalytics +from ._models_py3 import DataLakeAnalyticsSchema +from ._models_py3 import DataLakeAnalyticsSchemaProperties +from ._models_py3 import DataPathAssetReference +from ._models_py3 import DataVersionBase +from ._models_py3 import DataVersionBaseProperties +from ._models_py3 import DataVersionBaseResourceArmPaginatedResult +from ._models_py3 import Databricks +from ._models_py3 import DatabricksComputeSecrets +from ._models_py3 import DatabricksComputeSecretsProperties +from ._models_py3 import DatabricksProperties +from ._models_py3 import DatabricksSchema +from ._models_py3 import DatasetExportSummary +from ._models_py3 import Datastore +from ._models_py3 import DatastoreCredentials +from ._models_py3 import DatastoreProperties +from ._models_py3 import DatastoreResourceArmPaginatedResult +from ._models_py3 import DatastoreSecrets +from ._models_py3 import DefaultScaleSettings +from ._models_py3 import DeploymentLogs +from ._models_py3 import DeploymentLogsRequest +from ._models_py3 import DeploymentResourceConfiguration +from ._models_py3 import DiagnoseRequestProperties +from ._models_py3 import DiagnoseResponseResult +from ._models_py3 import DiagnoseResponseResultValue +from ._models_py3 import DiagnoseResult +from ._models_py3 import DiagnoseWorkspaceParameters +from ._models_py3 import DistributionConfiguration +from ._models_py3 import Docker +from ._models_py3 import EarlyTerminationPolicy +from ._models_py3 import EncryptionKeyVaultProperties +from ._models_py3 import EncryptionKeyVaultUpdateProperties +from ._models_py3 import EncryptionProperty +from ._models_py3 import EncryptionUpdateProperties +from ._models_py3 import Endpoint +from ._models_py3 import EndpointAuthKeys +from ._models_py3 import EndpointAuthToken +from ._models_py3 import EndpointDeploymentPropertiesBase +from ._models_py3 import EndpointPropertiesBase +from ._models_py3 import EndpointScheduleAction +from ._models_py3 import EnvironmentContainer +from ._models_py3 import EnvironmentContainerProperties +from ._models_py3 import EnvironmentContainerResourceArmPaginatedResult +from ._models_py3 import EnvironmentVariable +from ._models_py3 import EnvironmentVersion +from ._models_py3 import EnvironmentVersionProperties +from ._models_py3 import 
EnvironmentVersionResourceArmPaginatedResult +from ._models_py3 import ErrorAdditionalInfo +from ._models_py3 import ErrorDetail +from ._models_py3 import ErrorResponse +from ._models_py3 import EstimatedVMPrice +from ._models_py3 import EstimatedVMPrices +from ._models_py3 import ExportSummary +from ._models_py3 import ExternalFQDNResponse +from ._models_py3 import FQDNEndpoint +from ._models_py3 import FQDNEndpointDetail +from ._models_py3 import FQDNEndpoints +from ._models_py3 import FQDNEndpointsProperties +from ._models_py3 import FeaturizationSettings +from ._models_py3 import FlavorData +from ._models_py3 import ForecastHorizon +from ._models_py3 import Forecasting +from ._models_py3 import ForecastingSettings +from ._models_py3 import ForecastingTrainingSettings +from ._models_py3 import GridSamplingAlgorithm +from ._models_py3 import HDInsight +from ._models_py3 import HDInsightProperties +from ._models_py3 import HDInsightSchema +from ._models_py3 import HdfsDatastore +from ._models_py3 import IdAssetReference +from ._models_py3 import IdentityConfiguration +from ._models_py3 import IdentityForCmk +from ._models_py3 import IdleShutdownSetting +from ._models_py3 import Image +from ._models_py3 import ImageClassification +from ._models_py3 import ImageClassificationBase +from ._models_py3 import ImageClassificationMultilabel +from ._models_py3 import ImageInstanceSegmentation +from ._models_py3 import ImageLimitSettings +from ._models_py3 import ImageMetadata +from ._models_py3 import ImageModelDistributionSettings +from ._models_py3 import ImageModelDistributionSettingsClassification +from ._models_py3 import ImageModelDistributionSettingsObjectDetection +from ._models_py3 import ImageModelSettings +from ._models_py3 import ImageModelSettingsClassification +from ._models_py3 import ImageModelSettingsObjectDetection +from ._models_py3 import ImageObjectDetection +from ._models_py3 import ImageObjectDetectionBase +from ._models_py3 import ImageSweepSettings +from ._models_py3 import ImageVertical +from ._models_py3 import InferenceContainerProperties +from ._models_py3 import InstanceTypeSchema +from ._models_py3 import InstanceTypeSchemaResources +from ._models_py3 import JobBase +from ._models_py3 import JobBaseProperties +from ._models_py3 import JobBaseResourceArmPaginatedResult +from ._models_py3 import JobInput +from ._models_py3 import JobLimits +from ._models_py3 import JobOutput +from ._models_py3 import JobResourceConfiguration +from ._models_py3 import JobScheduleAction +from ._models_py3 import JobService +from ._models_py3 import KerberosCredentials +from ._models_py3 import KerberosKeytabCredentials +from ._models_py3 import KerberosKeytabSecrets +from ._models_py3 import KerberosPasswordCredentials +from ._models_py3 import KerberosPasswordSecrets +from ._models_py3 import Kubernetes +from ._models_py3 import KubernetesOnlineDeployment +from ._models_py3 import KubernetesProperties +from ._models_py3 import KubernetesSchema +from ._models_py3 import LabelCategory +from ._models_py3 import LabelClass +from ._models_py3 import LabelingDataConfiguration +from ._models_py3 import LabelingJob +from ._models_py3 import LabelingJobImageProperties +from ._models_py3 import LabelingJobInstructions +from ._models_py3 import LabelingJobMediaProperties +from ._models_py3 import LabelingJobProperties +from ._models_py3 import LabelingJobResourceArmPaginatedResult +from ._models_py3 import LabelingJobTextProperties +from ._models_py3 import ListAmlUserFeatureResult +from 
._models_py3 import ListNotebookKeysResult +from ._models_py3 import ListStorageAccountKeysResult +from ._models_py3 import ListUsagesResult +from ._models_py3 import ListWorkspaceKeysResult +from ._models_py3 import ListWorkspaceQuotas +from ._models_py3 import LiteralJobInput +from ._models_py3 import MLAssistConfiguration +from ._models_py3 import MLAssistConfigurationDisabled +from ._models_py3 import MLAssistConfigurationEnabled +from ._models_py3 import MLFlowModelJobInput +from ._models_py3 import MLFlowModelJobOutput +from ._models_py3 import MLTableData +from ._models_py3 import MLTableJobInput +from ._models_py3 import MLTableJobOutput +from ._models_py3 import ManagedIdentity +from ._models_py3 import ManagedIdentityAuthTypeWorkspaceConnectionProperties +from ._models_py3 import ManagedOnlineDeployment +from ._models_py3 import ManagedServiceIdentity +from ._models_py3 import MedianStoppingPolicy +from ._models_py3 import ModelContainer +from ._models_py3 import ModelContainerProperties +from ._models_py3 import ModelContainerResourceArmPaginatedResult +from ._models_py3 import ModelVersion +from ._models_py3 import ModelVersionProperties +from ._models_py3 import ModelVersionResourceArmPaginatedResult +from ._models_py3 import Mpi +from ._models_py3 import NCrossValidations +from ._models_py3 import NlpFixedParameters +from ._models_py3 import NlpParameterSubspace +from ._models_py3 import NlpSweepSettings +from ._models_py3 import NlpVertical +from ._models_py3 import NlpVerticalFeaturizationSettings +from ._models_py3 import NlpVerticalLimitSettings +from ._models_py3 import NodeStateCounts +from ._models_py3 import Nodes +from ._models_py3 import NoneAuthTypeWorkspaceConnectionProperties +from ._models_py3 import NoneDatastoreCredentials +from ._models_py3 import NotebookAccessTokenResult +from ._models_py3 import NotebookPreparationError +from ._models_py3 import NotebookResourceInfo +from ._models_py3 import Objective +from ._models_py3 import OnlineDeployment +from ._models_py3 import OnlineDeploymentProperties +from ._models_py3 import OnlineDeploymentTrackedResourceArmPaginatedResult +from ._models_py3 import OnlineEndpoint +from ._models_py3 import OnlineEndpointProperties +from ._models_py3 import OnlineEndpointTrackedResourceArmPaginatedResult +from ._models_py3 import OnlineRequestSettings +from ._models_py3 import OnlineScaleSettings +from ._models_py3 import OutputPathAssetReference +from ._models_py3 import PATAuthTypeWorkspaceConnectionProperties +from ._models_py3 import PaginatedComputeResourcesList +from ._models_py3 import PartialBatchDeployment +from ._models_py3 import PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties +from ._models_py3 import PartialManagedServiceIdentity +from ._models_py3 import PartialMinimalTrackedResource +from ._models_py3 import PartialMinimalTrackedResourceWithIdentity +from ._models_py3 import PartialMinimalTrackedResourceWithSku +from ._models_py3 import PartialRegistryPartialTrackedResource +from ._models_py3 import PartialSku +from ._models_py3 import Password +from ._models_py3 import PersonalComputeInstanceSettings +from ._models_py3 import PipelineJob +from ._models_py3 import PrivateEndpoint +from ._models_py3 import PrivateEndpointConnection +from ._models_py3 import PrivateEndpointConnectionListResult +from ._models_py3 import PrivateLinkResource +from ._models_py3 import PrivateLinkResourceListResult +from ._models_py3 import PrivateLinkServiceConnectionState +from ._models_py3 import ProbeSettings +from 
._models_py3 import ProgressMetrics +from ._models_py3 import PyTorch +from ._models_py3 import QuotaBaseProperties +from ._models_py3 import QuotaUpdateParameters +from ._models_py3 import RandomSamplingAlgorithm +from ._models_py3 import Recurrence +from ._models_py3 import RecurrenceSchedule +from ._models_py3 import RecurrenceTrigger +from ._models_py3 import RegenerateEndpointKeysRequest +from ._models_py3 import Registry +from ._models_py3 import RegistryListCredentialsResult +from ._models_py3 import RegistryProperties +from ._models_py3 import RegistryRegionArmDetails +from ._models_py3 import RegistryTrackedResourceArmPaginatedResult +from ._models_py3 import Regression +from ._models_py3 import RegressionTrainingSettings +from ._models_py3 import Resource +from ._models_py3 import ResourceBase +from ._models_py3 import ResourceConfiguration +from ._models_py3 import ResourceId +from ._models_py3 import ResourceName +from ._models_py3 import ResourceQuota +from ._models_py3 import Route +from ._models_py3 import SASAuthTypeWorkspaceConnectionProperties +from ._models_py3 import SamplingAlgorithm +from ._models_py3 import SasDatastoreCredentials +from ._models_py3 import SasDatastoreSecrets +from ._models_py3 import ScaleSettings +from ._models_py3 import ScaleSettingsInformation +from ._models_py3 import Schedule +from ._models_py3 import ScheduleActionBase +from ._models_py3 import ScheduleBase +from ._models_py3 import ScheduleProperties +from ._models_py3 import ScheduleResourceArmPaginatedResult +from ._models_py3 import ScriptReference +from ._models_py3 import ScriptsToExecute +from ._models_py3 import Seasonality +from ._models_py3 import ServiceManagedResourcesSettings +from ._models_py3 import ServicePrincipalAuthTypeWorkspaceConnectionProperties +from ._models_py3 import ServicePrincipalDatastoreCredentials +from ._models_py3 import ServicePrincipalDatastoreSecrets +from ._models_py3 import SetupScripts +from ._models_py3 import SharedPrivateLinkResource +from ._models_py3 import Sku +from ._models_py3 import SkuCapacity +from ._models_py3 import SkuResource +from ._models_py3 import SkuResourceArmPaginatedResult +from ._models_py3 import SkuSetting +from ._models_py3 import SparkJob +from ._models_py3 import SparkJobEntry +from ._models_py3 import SparkJobPythonEntry +from ._models_py3 import SparkJobScalaEntry +from ._models_py3 import SparkResourceConfiguration +from ._models_py3 import SslConfiguration +from ._models_py3 import StackEnsembleSettings +from ._models_py3 import StatusMessage +from ._models_py3 import StorageAccountDetails +from ._models_py3 import SweepJob +from ._models_py3 import SweepJobLimits +from ._models_py3 import SynapseSpark +from ._models_py3 import SynapseSparkProperties +from ._models_py3 import SystemCreatedAcrAccount +from ._models_py3 import SystemCreatedStorageAccount +from ._models_py3 import SystemData +from ._models_py3 import SystemService +from ._models_py3 import TableFixedParameters +from ._models_py3 import TableParameterSubspace +from ._models_py3 import TableSweepSettings +from ._models_py3 import TableVertical +from ._models_py3 import TableVerticalFeaturizationSettings +from ._models_py3 import TableVerticalLimitSettings +from ._models_py3 import TargetLags +from ._models_py3 import TargetRollingWindowSize +from ._models_py3 import TargetUtilizationScaleSettings +from ._models_py3 import TensorFlow +from ._models_py3 import TextClassification +from ._models_py3 import TextClassificationMultilabel +from ._models_py3 import 
TextNer +from ._models_py3 import TmpfsOptions +from ._models_py3 import TrackedResource +from ._models_py3 import TrainingSettings +from ._models_py3 import TrialComponent +from ._models_py3 import TriggerBase +from ._models_py3 import TritonModelJobInput +from ._models_py3 import TritonModelJobOutput +from ._models_py3 import TruncationSelectionPolicy +from ._models_py3 import UpdateWorkspaceQuotas +from ._models_py3 import UpdateWorkspaceQuotasResult +from ._models_py3 import UriFileDataVersion +from ._models_py3 import UriFileJobInput +from ._models_py3 import UriFileJobOutput +from ._models_py3 import UriFolderDataVersion +from ._models_py3 import UriFolderJobInput +from ._models_py3 import UriFolderJobOutput +from ._models_py3 import Usage +from ._models_py3 import UsageName +from ._models_py3 import UserAccountCredentials +from ._models_py3 import UserAssignedIdentity +from ._models_py3 import UserCreatedAcrAccount +from ._models_py3 import UserCreatedStorageAccount +from ._models_py3 import UserIdentity +from ._models_py3 import UsernamePasswordAuthTypeWorkspaceConnectionProperties +from ._models_py3 import VirtualMachine +from ._models_py3 import VirtualMachineImage +from ._models_py3 import VirtualMachineSchema +from ._models_py3 import VirtualMachineSchemaProperties +from ._models_py3 import VirtualMachineSecrets +from ._models_py3 import VirtualMachineSecretsSchema +from ._models_py3 import VirtualMachineSize +from ._models_py3 import VirtualMachineSizeListResult +from ._models_py3 import VirtualMachineSshCredentials +from ._models_py3 import VolumeDefinition +from ._models_py3 import VolumeOptions +from ._models_py3 import Workspace +from ._models_py3 import WorkspaceConnectionAccessKey +from ._models_py3 import WorkspaceConnectionManagedIdentity +from ._models_py3 import WorkspaceConnectionPersonalAccessToken +from ._models_py3 import WorkspaceConnectionPropertiesV2 +from ._models_py3 import WorkspaceConnectionPropertiesV2BasicResource +from ._models_py3 import WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult +from ._models_py3 import WorkspaceConnectionServicePrincipal +from ._models_py3 import WorkspaceConnectionSharedAccessSignature +from ._models_py3 import WorkspaceConnectionUsernamePassword +from ._models_py3 import WorkspaceListResult +from ._models_py3 import WorkspaceUpdateParameters -from ._azure_machine_learning_workspaces_enums import ( - AllocationState, - ApplicationSharingPolicy, - BillingCurrency, - ComputeInstanceState, - ComputeType, - EncryptionStatus, - NodeState, - OperationName, - OperationStatus, - PrivateEndpointConnectionProvisioningState, - PrivateEndpointServiceConnectionStatus, - ProvisioningState, - QuotaUnit, - ReasonCode, - RemoteLoginPortPublicAccess, - ResourceIdentityType, - SshPublicAccess, - SslConfigurationStatus, - Status, - UnderlyingResourceAction, - UnitOfMeasure, - UsageUnit, - VMPriceOSType, - VMTier, - VmPriority, -) +from ._azure_machine_learning_services_enums import AllocationState +from ._azure_machine_learning_services_enums import ApplicationSharingPolicy +from ._azure_machine_learning_services_enums import AssetProvisioningState +from ._azure_machine_learning_services_enums import Autosave +from ._azure_machine_learning_services_enums import BatchLoggingLevel +from ._azure_machine_learning_services_enums import BatchOutputAction +from ._azure_machine_learning_services_enums import BillingCurrency +from ._azure_machine_learning_services_enums import BlockedTransformers +from 
._azure_machine_learning_services_enums import Caching +from ._azure_machine_learning_services_enums import ClassificationModels +from ._azure_machine_learning_services_enums import ClassificationMultilabelPrimaryMetrics +from ._azure_machine_learning_services_enums import ClassificationPrimaryMetrics +from ._azure_machine_learning_services_enums import ClusterPurpose +from ._azure_machine_learning_services_enums import ComputeInstanceAuthorizationType +from ._azure_machine_learning_services_enums import ComputeInstanceState +from ._azure_machine_learning_services_enums import ComputePowerAction +from ._azure_machine_learning_services_enums import ComputeProvisioningState +from ._azure_machine_learning_services_enums import ComputeType +from ._azure_machine_learning_services_enums import ConnectionAuthType +from ._azure_machine_learning_services_enums import ContainerType +from ._azure_machine_learning_services_enums import CreatedByType +from ._azure_machine_learning_services_enums import CredentialsType +from ._azure_machine_learning_services_enums import DataType +from ._azure_machine_learning_services_enums import DatastoreType +from ._azure_machine_learning_services_enums import DeploymentProvisioningState +from ._azure_machine_learning_services_enums import DiagnoseResultLevel +from ._azure_machine_learning_services_enums import DistributionType +from ._azure_machine_learning_services_enums import EarlyTerminationPolicyType +from ._azure_machine_learning_services_enums import EgressPublicNetworkAccessType +from ._azure_machine_learning_services_enums import EncryptionStatus +from ._azure_machine_learning_services_enums import EndpointAuthMode +from ._azure_machine_learning_services_enums import EndpointComputeType +from ._azure_machine_learning_services_enums import EndpointProvisioningState +from ._azure_machine_learning_services_enums import EnvironmentType +from ._azure_machine_learning_services_enums import EnvironmentVariableType +from ._azure_machine_learning_services_enums import ExportFormatType +from ._azure_machine_learning_services_enums import FeatureLags +from ._azure_machine_learning_services_enums import FeaturizationMode +from ._azure_machine_learning_services_enums import ForecastHorizonMode +from ._azure_machine_learning_services_enums import ForecastingModels +from ._azure_machine_learning_services_enums import ForecastingPrimaryMetrics +from ._azure_machine_learning_services_enums import Goal +from ._azure_machine_learning_services_enums import IdentityConfigurationType +from ._azure_machine_learning_services_enums import ImageAnnotationType +from ._azure_machine_learning_services_enums import ImageType +from ._azure_machine_learning_services_enums import InputDeliveryMode +from ._azure_machine_learning_services_enums import InstanceSegmentationPrimaryMetrics +from ._azure_machine_learning_services_enums import JobInputType +from ._azure_machine_learning_services_enums import JobLimitsType +from ._azure_machine_learning_services_enums import JobOutputType +from ._azure_machine_learning_services_enums import JobProvisioningState +from ._azure_machine_learning_services_enums import JobStatus +from ._azure_machine_learning_services_enums import JobType +from ._azure_machine_learning_services_enums import KeyType +from ._azure_machine_learning_services_enums import LearningRateScheduler +from ._azure_machine_learning_services_enums import ListViewType +from ._azure_machine_learning_services_enums import LoadBalancerType +from ._azure_machine_learning_services_enums 
import LogVerbosity +from ._azure_machine_learning_services_enums import MLAssistConfigurationType +from ._azure_machine_learning_services_enums import MLFlowAutologgerState +from ._azure_machine_learning_services_enums import ManagedServiceIdentityType +from ._azure_machine_learning_services_enums import MediaType +from ._azure_machine_learning_services_enums import MlflowAutologger +from ._azure_machine_learning_services_enums import ModelSize +from ._azure_machine_learning_services_enums import MountAction +from ._azure_machine_learning_services_enums import MountState +from ._azure_machine_learning_services_enums import NCrossValidationsMode +from ._azure_machine_learning_services_enums import Network +from ._azure_machine_learning_services_enums import NlpLearningRateScheduler +from ._azure_machine_learning_services_enums import NodeState +from ._azure_machine_learning_services_enums import NodesValueType +from ._azure_machine_learning_services_enums import ObjectDetectionPrimaryMetrics +from ._azure_machine_learning_services_enums import OperatingSystemType +from ._azure_machine_learning_services_enums import OperationName +from ._azure_machine_learning_services_enums import OperationStatus +from ._azure_machine_learning_services_enums import OperationTrigger +from ._azure_machine_learning_services_enums import OrderString +from ._azure_machine_learning_services_enums import OsType +from ._azure_machine_learning_services_enums import OutputDeliveryMode +from ._azure_machine_learning_services_enums import PrivateEndpointConnectionProvisioningState +from ._azure_machine_learning_services_enums import PrivateEndpointServiceConnectionStatus +from ._azure_machine_learning_services_enums import Protocol +from ._azure_machine_learning_services_enums import ProvisioningStatus +from ._azure_machine_learning_services_enums import PublicNetworkAccess +from ._azure_machine_learning_services_enums import PublicNetworkAccessType +from ._azure_machine_learning_services_enums import QuotaUnit +from ._azure_machine_learning_services_enums import RandomSamplingAlgorithmRule +from ._azure_machine_learning_services_enums import RecurrenceFrequency +from ._azure_machine_learning_services_enums import ReferenceType +from ._azure_machine_learning_services_enums import RegressionModels +from ._azure_machine_learning_services_enums import RegressionPrimaryMetrics +from ._azure_machine_learning_services_enums import RemoteLoginPortPublicAccess +from ._azure_machine_learning_services_enums import SamplingAlgorithmType +from ._azure_machine_learning_services_enums import ScaleType +from ._azure_machine_learning_services_enums import ScheduleActionType +from ._azure_machine_learning_services_enums import ScheduleListViewType +from ._azure_machine_learning_services_enums import ScheduleProvisioningState +from ._azure_machine_learning_services_enums import ScheduleProvisioningStatus +from ._azure_machine_learning_services_enums import ScheduleStatus +from ._azure_machine_learning_services_enums import SeasonalityMode +from ._azure_machine_learning_services_enums import SecretsType +from ._azure_machine_learning_services_enums import ServiceDataAccessAuthIdentity +from ._azure_machine_learning_services_enums import ShortSeriesHandlingConfiguration +from ._azure_machine_learning_services_enums import SkuScaleType +from ._azure_machine_learning_services_enums import SkuTier +from ._azure_machine_learning_services_enums import SourceType +from ._azure_machine_learning_services_enums import SparkJobEntryType +from 
._azure_machine_learning_services_enums import SshPublicAccess +from ._azure_machine_learning_services_enums import SslConfigStatus +from ._azure_machine_learning_services_enums import StackMetaLearnerType +from ._azure_machine_learning_services_enums import Status +from ._azure_machine_learning_services_enums import StatusMessageLevel +from ._azure_machine_learning_services_enums import StochasticOptimizer +from ._azure_machine_learning_services_enums import StorageAccountType +from ._azure_machine_learning_services_enums import TargetAggregationFunction +from ._azure_machine_learning_services_enums import TargetLagsMode +from ._azure_machine_learning_services_enums import TargetRollingWindowSizeMode +from ._azure_machine_learning_services_enums import TaskType +from ._azure_machine_learning_services_enums import TextAnnotationType +from ._azure_machine_learning_services_enums import TriggerType +from ._azure_machine_learning_services_enums import UnderlyingResourceAction +from ._azure_machine_learning_services_enums import UnitOfMeasure +from ._azure_machine_learning_services_enums import UsageUnit +from ._azure_machine_learning_services_enums import UseStl +from ._azure_machine_learning_services_enums import VMPriceOSType +from ._azure_machine_learning_services_enums import VMTier +from ._azure_machine_learning_services_enums import ValidationMetricType +from ._azure_machine_learning_services_enums import ValueFormat +from ._azure_machine_learning_services_enums import VmPriority +from ._azure_machine_learning_services_enums import VolumeDefinitionType +from ._azure_machine_learning_services_enums import WeekDay +from ._azure_machine_learning_services_enums import WorkspaceProvisioningState +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'AKS', - 'AKSProperties', - 'AksComputeSecrets', - 'AksNetworkingConfiguration', - 'AmlCompute', - 'AmlComputeNodeInformation', - 'AmlComputeNodesInformation', - 'AmlComputeProperties', - 'AmlUserFeature', - 'ClusterUpdateParameters', - 'ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties', - 'Compute', - 'ComputeInstance', - 'ComputeInstanceApplication', - 'ComputeInstanceConnectivityEndpoints', - 'ComputeInstanceCreatedBy', - 'ComputeInstanceLastOperation', - 'ComputeInstanceProperties', - 'ComputeInstanceSshSettings', - 'ComputeNodesInformation', - 'ComputeResource', - 'ComputeSecrets', - 'DataFactory', - 'DataLakeAnalytics', - 'DataLakeAnalyticsProperties', - 'Databricks', - 'DatabricksComputeSecrets', - 'DatabricksProperties', - 'EncryptionProperty', - 'ErrorDetail', - 'ErrorResponse', - 'EstimatedVMPrice', - 'EstimatedVMPrices', - 'HDInsight', - 'HDInsightProperties', - 'Identity', - 'KeyVaultProperties', - 'ListAmlUserFeatureResult', - 'ListUsagesResult', - 'ListWorkspaceKeysResult', - 'ListWorkspaceQuotas', - 'MachineLearningServiceError', - 'NodeStateCounts', - 'NotebookListCredentialsResult', - 'NotebookPreparationError', - 'NotebookResourceInfo', - 'Operation', - 'OperationDisplay', - 'OperationListResult', - 'PaginatedComputeResourcesList', - 'PaginatedWorkspaceConnectionsList', - 'Password', - 'PrivateEndpoint', - 'PrivateEndpointConnection', - 'PrivateLinkResource', - 'PrivateLinkResourceListResult', - 'PrivateLinkServiceConnectionState', - 'QuotaBaseProperties', - 'QuotaUpdateParameters', - 'RegistryListCredentialsResult', - 'Resource', - 'ResourceId', - 'ResourceName', - 
'ResourceQuota', - 'ResourceSkuLocationInfo', - 'ResourceSkuZoneDetails', - 'Restriction', - 'SKUCapability', - 'ScaleSettings', - 'ServicePrincipalCredentials', - 'SharedPrivateLinkResource', - 'Sku', - 'SkuListResult', - 'SkuSettings', - 'SslConfiguration', - 'SystemService', - 'UpdateWorkspaceQuotas', - 'UpdateWorkspaceQuotasResult', - 'Usage', - 'UsageName', - 'UserAccountCredentials', - 'VirtualMachine', - 'VirtualMachineProperties', - 'VirtualMachineSecrets', - 'VirtualMachineSize', - 'VirtualMachineSizeListResult', - 'VirtualMachineSshCredentials', - 'Workspace', - 'WorkspaceConnection', - 'WorkspaceConnectionDto', - 'WorkspaceListResult', - 'WorkspaceSku', - 'WorkspaceUpdateParameters', - 'AllocationState', - 'ApplicationSharingPolicy', - 'BillingCurrency', - 'ComputeInstanceState', - 'ComputeType', - 'EncryptionStatus', - 'NodeState', - 'OperationName', - 'OperationStatus', - 'PrivateEndpointConnectionProvisioningState', - 'PrivateEndpointServiceConnectionStatus', - 'ProvisioningState', - 'QuotaUnit', - 'ReasonCode', - 'RemoteLoginPortPublicAccess', - 'ResourceIdentityType', - 'SshPublicAccess', - 'SslConfigurationStatus', - 'Status', - 'UnderlyingResourceAction', - 'UnitOfMeasure', - 'UsageUnit', - 'VMPriceOSType', - 'VMTier', - 'VmPriority', + "AKS", + "AKSSchema", + "AKSSchemaProperties", + "AccessKeyAuthTypeWorkspaceConnectionProperties", + "AccountKeyDatastoreCredentials", + "AccountKeyDatastoreSecrets", + "AcrDetails", + "AksComputeSecrets", + "AksComputeSecretsProperties", + "AksNetworkingConfiguration", + "AllNodes", + "AmlCompute", + "AmlComputeNodeInformation", + "AmlComputeNodesInformation", + "AmlComputeProperties", + "AmlComputeSchema", + "AmlOperation", + "AmlOperationDisplay", + "AmlOperationListResult", + "AmlToken", + "AmlUserFeature", + "ArmResourceId", + "AssetBase", + "AssetContainer", + "AssetJobInput", + "AssetJobOutput", + "AssetReferenceBase", + "AssignedUser", + "AutoForecastHorizon", + "AutoMLJob", + "AutoMLVertical", + "AutoNCrossValidations", + "AutoPauseProperties", + "AutoScaleProperties", + "AutoSeasonality", + "AutoTargetLags", + "AutoTargetRollingWindowSize", + "AutologgerSettings", + "AzureBlobDatastore", + "AzureDataLakeGen1Datastore", + "AzureDataLakeGen2Datastore", + "AzureDatastore", + "AzureFileDatastore", + "BanditPolicy", + "BatchDeployment", + "BatchDeploymentProperties", + "BatchDeploymentTrackedResourceArmPaginatedResult", + "BatchEndpoint", + "BatchEndpointDefaults", + "BatchEndpointProperties", + "BatchEndpointTrackedResourceArmPaginatedResult", + "BatchRetrySettings", + "BayesianSamplingAlgorithm", + "BindOptions", + "BuildContext", + "CertificateDatastoreCredentials", + "CertificateDatastoreSecrets", + "Classification", + "ClassificationTrainingSettings", + "ClusterUpdateParameters", + "CocoExportSummary", + "CodeConfiguration", + "CodeContainer", + "CodeContainerProperties", + "CodeContainerResourceArmPaginatedResult", + "CodeVersion", + "CodeVersionProperties", + "CodeVersionResourceArmPaginatedResult", + "ColumnTransformer", + "CommandJob", + "CommandJobLimits", + "ComponentContainer", + "ComponentContainerProperties", + "ComponentContainerResourceArmPaginatedResult", + "ComponentVersion", + "ComponentVersionProperties", + "ComponentVersionResourceArmPaginatedResult", + "Compute", + "ComputeInstance", + "ComputeInstanceApplication", + "ComputeInstanceAutologgerSettings", + "ComputeInstanceConnectivityEndpoints", + "ComputeInstanceContainer", + "ComputeInstanceCreatedBy", + "ComputeInstanceDataDisk", + "ComputeInstanceDataMount", + 
"ComputeInstanceEnvironmentInfo", + "ComputeInstanceLastOperation", + "ComputeInstanceProperties", + "ComputeInstanceSchema", + "ComputeInstanceSshSettings", + "ComputeInstanceVersion", + "ComputeResource", + "ComputeResourceSchema", + "ComputeSchedules", + "ComputeSecrets", + "ComputeStartStopSchedule", + "ContainerResourceRequirements", + "ContainerResourceSettings", + "CosmosDbSettings", + "Cron", + "CronTrigger", + "CsvExportSummary", + "CustomForecastHorizon", + "CustomModelJobInput", + "CustomModelJobOutput", + "CustomNCrossValidations", + "CustomSeasonality", + "CustomService", + "CustomTargetLags", + "CustomTargetRollingWindowSize", + "DataContainer", + "DataContainerProperties", + "DataContainerResourceArmPaginatedResult", + "DataFactory", + "DataLakeAnalytics", + "DataLakeAnalyticsSchema", + "DataLakeAnalyticsSchemaProperties", + "DataPathAssetReference", + "DataVersionBase", + "DataVersionBaseProperties", + "DataVersionBaseResourceArmPaginatedResult", + "Databricks", + "DatabricksComputeSecrets", + "DatabricksComputeSecretsProperties", + "DatabricksProperties", + "DatabricksSchema", + "DatasetExportSummary", + "Datastore", + "DatastoreCredentials", + "DatastoreProperties", + "DatastoreResourceArmPaginatedResult", + "DatastoreSecrets", + "DefaultScaleSettings", + "DeploymentLogs", + "DeploymentLogsRequest", + "DeploymentResourceConfiguration", + "DiagnoseRequestProperties", + "DiagnoseResponseResult", + "DiagnoseResponseResultValue", + "DiagnoseResult", + "DiagnoseWorkspaceParameters", + "DistributionConfiguration", + "Docker", + "EarlyTerminationPolicy", + "EncryptionKeyVaultProperties", + "EncryptionKeyVaultUpdateProperties", + "EncryptionProperty", + "EncryptionUpdateProperties", + "Endpoint", + "EndpointAuthKeys", + "EndpointAuthToken", + "EndpointDeploymentPropertiesBase", + "EndpointPropertiesBase", + "EndpointScheduleAction", + "EnvironmentContainer", + "EnvironmentContainerProperties", + "EnvironmentContainerResourceArmPaginatedResult", + "EnvironmentVariable", + "EnvironmentVersion", + "EnvironmentVersionProperties", + "EnvironmentVersionResourceArmPaginatedResult", + "ErrorAdditionalInfo", + "ErrorDetail", + "ErrorResponse", + "EstimatedVMPrice", + "EstimatedVMPrices", + "ExportSummary", + "ExternalFQDNResponse", + "FQDNEndpoint", + "FQDNEndpointDetail", + "FQDNEndpoints", + "FQDNEndpointsProperties", + "FeaturizationSettings", + "FlavorData", + "ForecastHorizon", + "Forecasting", + "ForecastingSettings", + "ForecastingTrainingSettings", + "GridSamplingAlgorithm", + "HDInsight", + "HDInsightProperties", + "HDInsightSchema", + "HdfsDatastore", + "IdAssetReference", + "IdentityConfiguration", + "IdentityForCmk", + "IdleShutdownSetting", + "Image", + "ImageClassification", + "ImageClassificationBase", + "ImageClassificationMultilabel", + "ImageInstanceSegmentation", + "ImageLimitSettings", + "ImageMetadata", + "ImageModelDistributionSettings", + "ImageModelDistributionSettingsClassification", + "ImageModelDistributionSettingsObjectDetection", + "ImageModelSettings", + "ImageModelSettingsClassification", + "ImageModelSettingsObjectDetection", + "ImageObjectDetection", + "ImageObjectDetectionBase", + "ImageSweepSettings", + "ImageVertical", + "InferenceContainerProperties", + "InstanceTypeSchema", + "InstanceTypeSchemaResources", + "JobBase", + "JobBaseProperties", + "JobBaseResourceArmPaginatedResult", + "JobInput", + "JobLimits", + "JobOutput", + "JobResourceConfiguration", + "JobScheduleAction", + "JobService", + "KerberosCredentials", + "KerberosKeytabCredentials", + 
"KerberosKeytabSecrets", + "KerberosPasswordCredentials", + "KerberosPasswordSecrets", + "Kubernetes", + "KubernetesOnlineDeployment", + "KubernetesProperties", + "KubernetesSchema", + "LabelCategory", + "LabelClass", + "LabelingDataConfiguration", + "LabelingJob", + "LabelingJobImageProperties", + "LabelingJobInstructions", + "LabelingJobMediaProperties", + "LabelingJobProperties", + "LabelingJobResourceArmPaginatedResult", + "LabelingJobTextProperties", + "ListAmlUserFeatureResult", + "ListNotebookKeysResult", + "ListStorageAccountKeysResult", + "ListUsagesResult", + "ListWorkspaceKeysResult", + "ListWorkspaceQuotas", + "LiteralJobInput", + "MLAssistConfiguration", + "MLAssistConfigurationDisabled", + "MLAssistConfigurationEnabled", + "MLFlowModelJobInput", + "MLFlowModelJobOutput", + "MLTableData", + "MLTableJobInput", + "MLTableJobOutput", + "ManagedIdentity", + "ManagedIdentityAuthTypeWorkspaceConnectionProperties", + "ManagedOnlineDeployment", + "ManagedServiceIdentity", + "MedianStoppingPolicy", + "ModelContainer", + "ModelContainerProperties", + "ModelContainerResourceArmPaginatedResult", + "ModelVersion", + "ModelVersionProperties", + "ModelVersionResourceArmPaginatedResult", + "Mpi", + "NCrossValidations", + "NlpFixedParameters", + "NlpParameterSubspace", + "NlpSweepSettings", + "NlpVertical", + "NlpVerticalFeaturizationSettings", + "NlpVerticalLimitSettings", + "NodeStateCounts", + "Nodes", + "NoneAuthTypeWorkspaceConnectionProperties", + "NoneDatastoreCredentials", + "NotebookAccessTokenResult", + "NotebookPreparationError", + "NotebookResourceInfo", + "Objective", + "OnlineDeployment", + "OnlineDeploymentProperties", + "OnlineDeploymentTrackedResourceArmPaginatedResult", + "OnlineEndpoint", + "OnlineEndpointProperties", + "OnlineEndpointTrackedResourceArmPaginatedResult", + "OnlineRequestSettings", + "OnlineScaleSettings", + "OutputPathAssetReference", + "PATAuthTypeWorkspaceConnectionProperties", + "PaginatedComputeResourcesList", + "PartialBatchDeployment", + "PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties", + "PartialManagedServiceIdentity", + "PartialMinimalTrackedResource", + "PartialMinimalTrackedResourceWithIdentity", + "PartialMinimalTrackedResourceWithSku", + "PartialRegistryPartialTrackedResource", + "PartialSku", + "Password", + "PersonalComputeInstanceSettings", + "PipelineJob", + "PrivateEndpoint", + "PrivateEndpointConnection", + "PrivateEndpointConnectionListResult", + "PrivateLinkResource", + "PrivateLinkResourceListResult", + "PrivateLinkServiceConnectionState", + "ProbeSettings", + "ProgressMetrics", + "PyTorch", + "QuotaBaseProperties", + "QuotaUpdateParameters", + "RandomSamplingAlgorithm", + "Recurrence", + "RecurrenceSchedule", + "RecurrenceTrigger", + "RegenerateEndpointKeysRequest", + "Registry", + "RegistryListCredentialsResult", + "RegistryProperties", + "RegistryRegionArmDetails", + "RegistryTrackedResourceArmPaginatedResult", + "Regression", + "RegressionTrainingSettings", + "Resource", + "ResourceBase", + "ResourceConfiguration", + "ResourceId", + "ResourceName", + "ResourceQuota", + "Route", + "SASAuthTypeWorkspaceConnectionProperties", + "SamplingAlgorithm", + "SasDatastoreCredentials", + "SasDatastoreSecrets", + "ScaleSettings", + "ScaleSettingsInformation", + "Schedule", + "ScheduleActionBase", + "ScheduleBase", + "ScheduleProperties", + "ScheduleResourceArmPaginatedResult", + "ScriptReference", + "ScriptsToExecute", + "Seasonality", + "ServiceManagedResourcesSettings", + 
"ServicePrincipalAuthTypeWorkspaceConnectionProperties", + "ServicePrincipalDatastoreCredentials", + "ServicePrincipalDatastoreSecrets", + "SetupScripts", + "SharedPrivateLinkResource", + "Sku", + "SkuCapacity", + "SkuResource", + "SkuResourceArmPaginatedResult", + "SkuSetting", + "SparkJob", + "SparkJobEntry", + "SparkJobPythonEntry", + "SparkJobScalaEntry", + "SparkResourceConfiguration", + "SslConfiguration", + "StackEnsembleSettings", + "StatusMessage", + "StorageAccountDetails", + "SweepJob", + "SweepJobLimits", + "SynapseSpark", + "SynapseSparkProperties", + "SystemCreatedAcrAccount", + "SystemCreatedStorageAccount", + "SystemData", + "SystemService", + "TableFixedParameters", + "TableParameterSubspace", + "TableSweepSettings", + "TableVertical", + "TableVerticalFeaturizationSettings", + "TableVerticalLimitSettings", + "TargetLags", + "TargetRollingWindowSize", + "TargetUtilizationScaleSettings", + "TensorFlow", + "TextClassification", + "TextClassificationMultilabel", + "TextNer", + "TmpfsOptions", + "TrackedResource", + "TrainingSettings", + "TrialComponent", + "TriggerBase", + "TritonModelJobInput", + "TritonModelJobOutput", + "TruncationSelectionPolicy", + "UpdateWorkspaceQuotas", + "UpdateWorkspaceQuotasResult", + "UriFileDataVersion", + "UriFileJobInput", + "UriFileJobOutput", + "UriFolderDataVersion", + "UriFolderJobInput", + "UriFolderJobOutput", + "Usage", + "UsageName", + "UserAccountCredentials", + "UserAssignedIdentity", + "UserCreatedAcrAccount", + "UserCreatedStorageAccount", + "UserIdentity", + "UsernamePasswordAuthTypeWorkspaceConnectionProperties", + "VirtualMachine", + "VirtualMachineImage", + "VirtualMachineSchema", + "VirtualMachineSchemaProperties", + "VirtualMachineSecrets", + "VirtualMachineSecretsSchema", + "VirtualMachineSize", + "VirtualMachineSizeListResult", + "VirtualMachineSshCredentials", + "VolumeDefinition", + "VolumeOptions", + "Workspace", + "WorkspaceConnectionAccessKey", + "WorkspaceConnectionManagedIdentity", + "WorkspaceConnectionPersonalAccessToken", + "WorkspaceConnectionPropertiesV2", + "WorkspaceConnectionPropertiesV2BasicResource", + "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", + "WorkspaceConnectionServicePrincipal", + "WorkspaceConnectionSharedAccessSignature", + "WorkspaceConnectionUsernamePassword", + "WorkspaceListResult", + "WorkspaceUpdateParameters", + "AllocationState", + "ApplicationSharingPolicy", + "AssetProvisioningState", + "Autosave", + "BatchLoggingLevel", + "BatchOutputAction", + "BillingCurrency", + "BlockedTransformers", + "Caching", + "ClassificationModels", + "ClassificationMultilabelPrimaryMetrics", + "ClassificationPrimaryMetrics", + "ClusterPurpose", + "ComputeInstanceAuthorizationType", + "ComputeInstanceState", + "ComputePowerAction", + "ComputeProvisioningState", + "ComputeType", + "ConnectionAuthType", + "ContainerType", + "CreatedByType", + "CredentialsType", + "DataType", + "DatastoreType", + "DeploymentProvisioningState", + "DiagnoseResultLevel", + "DistributionType", + "EarlyTerminationPolicyType", + "EgressPublicNetworkAccessType", + "EncryptionStatus", + "EndpointAuthMode", + "EndpointComputeType", + "EndpointProvisioningState", + "EnvironmentType", + "EnvironmentVariableType", + "ExportFormatType", + "FeatureLags", + "FeaturizationMode", + "ForecastHorizonMode", + "ForecastingModels", + "ForecastingPrimaryMetrics", + "Goal", + "IdentityConfigurationType", + "ImageAnnotationType", + "ImageType", + "InputDeliveryMode", + "InstanceSegmentationPrimaryMetrics", + "JobInputType", + 
"JobLimitsType", + "JobOutputType", + "JobProvisioningState", + "JobStatus", + "JobType", + "KeyType", + "LearningRateScheduler", + "ListViewType", + "LoadBalancerType", + "LogVerbosity", + "MLAssistConfigurationType", + "MLFlowAutologgerState", + "ManagedServiceIdentityType", + "MediaType", + "MlflowAutologger", + "ModelSize", + "MountAction", + "MountState", + "NCrossValidationsMode", + "Network", + "NlpLearningRateScheduler", + "NodeState", + "NodesValueType", + "ObjectDetectionPrimaryMetrics", + "OperatingSystemType", + "OperationName", + "OperationStatus", + "OperationTrigger", + "OrderString", + "OsType", + "OutputDeliveryMode", + "PrivateEndpointConnectionProvisioningState", + "PrivateEndpointServiceConnectionStatus", + "Protocol", + "ProvisioningStatus", + "PublicNetworkAccess", + "PublicNetworkAccessType", + "QuotaUnit", + "RandomSamplingAlgorithmRule", + "RecurrenceFrequency", + "ReferenceType", + "RegressionModels", + "RegressionPrimaryMetrics", + "RemoteLoginPortPublicAccess", + "SamplingAlgorithmType", + "ScaleType", + "ScheduleActionType", + "ScheduleListViewType", + "ScheduleProvisioningState", + "ScheduleProvisioningStatus", + "ScheduleStatus", + "SeasonalityMode", + "SecretsType", + "ServiceDataAccessAuthIdentity", + "ShortSeriesHandlingConfiguration", + "SkuScaleType", + "SkuTier", + "SourceType", + "SparkJobEntryType", + "SshPublicAccess", + "SslConfigStatus", + "StackMetaLearnerType", + "Status", + "StatusMessageLevel", + "StochasticOptimizer", + "StorageAccountType", + "TargetAggregationFunction", + "TargetLagsMode", + "TargetRollingWindowSizeMode", + "TaskType", + "TextAnnotationType", + "TriggerType", + "UnderlyingResourceAction", + "UnitOfMeasure", + "UsageUnit", + "UseStl", + "VMPriceOSType", + "VMTier", + "ValidationMetricType", + "ValueFormat", + "VmPriority", + "VolumeDefinitionType", + "WeekDay", + "WorkspaceProvisioningState", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_services_enums.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_services_enums.py new file mode 100644 index 000000000000..6114f485d916 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_services_enums.py @@ -0,0 +1,1538 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class AllocationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Allocation state of the compute. Possible values are: steady - Indicates that the compute is + not resizing. There are no changes to the number of compute nodes in the compute in progress. A + compute enters this state when it is created and when no operations are being performed on the + compute to change the number of compute nodes. 
resizing - Indicates that the compute is + resizing; that is, compute nodes are being added to or removed from the compute. + """ + + STEADY = "Steady" + RESIZING = "Resizing" + + +class ApplicationSharingPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Policy for sharing applications on this compute instance among users of parent workspace. If + Personal, only the creator can access applications on this compute instance. When Shared, any + workspace user can access applications on this instance depending on his/her assigned role. + """ + + PERSONAL = "Personal" + SHARED = "Shared" + + +class AssetProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of registry asset.""" + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + + +class Autosave(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Auto save settings.""" + + NONE = "None" + LOCAL = "Local" + REMOTE = "Remote" + + +class BatchLoggingLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Log verbosity for batch inferencing. + Increasing verbosity order for logging is: Warning, Info and Debug. + The default value is Info. + """ + + INFO = "Info" + WARNING = "Warning" + DEBUG = "Debug" + + +class BatchOutputAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine how batch inferencing will handle output.""" + + SUMMARY_ONLY = "SummaryOnly" + APPEND_ROW = "AppendRow" + + +class BillingCurrency(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Three-letter code specifying the currency of the VM price. Example: USD.""" + + USD = "USD" + + +class BlockedTransformers(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum of transformers that can be blocked from use by AutoML featurization.""" + + #: Target encoding for text data. + TEXT_TARGET_ENCODER = "TextTargetEncoder" + #: One hot encoding creates a binary feature transformation. + ONE_HOT_ENCODER = "OneHotEncoder" + #: Target encoding for categorical data. + CAT_TARGET_ENCODER = "CatTargetEncoder" + #: Tf-Idf stands for term-frequency times inverse document-frequency. This is a common term + #: weighting scheme for identifying information from documents. + TF_IDF = "TfIdf" + #: Weight of Evidence encoding is a technique used to encode categorical variables. It uses the + #: natural log of the P(1)/P(0) to create weights. + WO_E_TARGET_ENCODER = "WoETargetEncoder" + #: Label encoder converts labels/categorical variables into a numerical form. + LABEL_ENCODER = "LabelEncoder" + #: Word embedding helps represent words or phrases as a vector, or a series of numbers. + WORD_EMBEDDING = "WordEmbedding" + #: Naive Bayes is a classifier used for classification of discrete features that are + #: categorically distributed. + NAIVE_BAYES = "NaiveBayes" + #: Count Vectorizer converts a collection of text documents to a matrix of token counts. + COUNT_VECTORIZER = "CountVectorizer" + #: Hashing One Hot Encoder can turn categorical variables into a limited number of new features. + #: This is often used for high-cardinality categorical features. 
+ HASH_ONE_HOT_ENCODER = "HashOneHotEncoder" + + +class Caching(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Caching type of Data Disk.""" + + NONE = "None" + READ_ONLY = "ReadOnly" + READ_WRITE = "ReadWrite" + + +class ClassificationModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all classification models supported by AutoML.""" + + #: Logistic regression is a fundamental classification technique. + #: It belongs to the group of linear classifiers and is somewhat similar to polynomial and linear + #: regression. + #: Logistic regression is fast and relatively uncomplicated, and it's convenient for you to + #: interpret the results. + #: Although it's essentially a method for binary classification, it can also be applied to + #: multiclass problems. + LOGISTIC_REGRESSION = "LogisticRegression" + #: SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning + #: applications + #: to find the model parameters that correspond to the best fit between predicted and actual + #: outputs. + SGD = "SGD" + #: The multinomial Naive Bayes classifier is suitable for classification with discrete features + #: (e.g., word counts for text classification). + #: The multinomial distribution normally requires integer feature counts. However, in practice, + #: fractional counts such as tf-idf may also work. + MULTINOMIAL_NAIVE_BAYES = "MultinomialNaiveBayes" + #: Naive Bayes classifier for multivariate Bernoulli models. + BERNOULLI_NAIVE_BAYES = "BernoulliNaiveBayes" + #: A support vector machine (SVM) is a supervised machine learning model that uses classification + #: algorithms for two-group classification problems. + #: After giving an SVM model sets of labeled training data for each category, they're able to + #: categorize new text. + SVM = "SVM" + #: A support vector machine (SVM) is a supervised machine learning model that uses classification + #: algorithms for two-group classification problems. + #: After giving an SVM model sets of labeled training data for each category, they're able to + #: categorize new text. + #: Linear SVM performs best when input data is linear, i.e., data can be easily classified by + #: drawing the straight line between classified values on a plotted graph. + LINEAR_SVM = "LinearSVM" + #: K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new + #: datapoints + #: which further means that the new data point will be assigned a value based on how closely it + #: matches the points in the training set. + KNN = "KNN" + #: Decision Trees are a non-parametric supervised learning method used for both classification and + #: regression tasks. + #: The goal is to create a model that predicts the value of a target variable by learning simple + #: decision rules inferred from the data features. + DECISION_TREE = "DecisionTree" + #: Random forest is a supervised learning algorithm. + #: The "forest"\ it builds, is an ensemble of decision trees, usually trained with the “bagging”\ + #: method. + #: The general idea of the bagging method is that a combination of learning models increases the + #: overall result. + RANDOM_FOREST = "RandomForest" + #: Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many + #: decision trees. It is related to the widely used random forest algorithm. + EXTREME_RANDOM_TREES = "ExtremeRandomTrees" + #: LightGBM is a gradient boosting framework that uses tree based learning algorithms. 
+ LIGHT_GBM = "LightGBM" + #: The technique of transiting week learners into a strong learner is called Boosting. The + #: gradient boosting algorithm process works on this theory of execution. + GRADIENT_BOOSTING = "GradientBoosting" + #: XGBoost: Extreme Gradient Boosting Algorithm. This algorithm is used for structured data where + #: target column values can be divided into distinct class values. + XG_BOOST_CLASSIFIER = "XGBoostClassifier" + + +class ClassificationMultilabelPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for classification multilabel tasks.""" + + #: AUC is the Area under the curve. + #: This metric represents arithmetic mean of the score for each class, + #: weighted by the number of true instances in each class. + AUC_WEIGHTED = "AUCWeighted" + #: Accuracy is the ratio of predictions that exactly match the true class labels. + ACCURACY = "Accuracy" + #: Normalized macro recall is recall macro-averaged and normalized, so that random + #: performance has a score of 0, and perfect performance has a score of 1. + NORM_MACRO_RECALL = "NormMacroRecall" + #: The arithmetic mean of the average precision score for each class, weighted by + #: the number of true instances in each class. + AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted" + #: The arithmetic mean of precision for each class, weighted by number of true instances in each + #: class. + PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted" + #: Intersection Over Union. Intersection of predictions divided by union of predictions. + IOU = "IOU" + + +class ClassificationPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for classification tasks.""" + + #: AUC is the Area under the curve. + #: This metric represents arithmetic mean of the score for each class, + #: weighted by the number of true instances in each class. + AUC_WEIGHTED = "AUCWeighted" + #: Accuracy is the ratio of predictions that exactly match the true class labels. + ACCURACY = "Accuracy" + #: Normalized macro recall is recall macro-averaged and normalized, so that random + #: performance has a score of 0, and perfect performance has a score of 1. + NORM_MACRO_RECALL = "NormMacroRecall" + #: The arithmetic mean of the average precision score for each class, weighted by + #: the number of true instances in each class. + AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted" + #: The arithmetic mean of precision for each class, weighted by number of true instances in each + #: class. + PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted" + + +class ClusterPurpose(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Intended usage of the cluster.""" + + FAST_PROD = "FastProd" + DENSE_PROD = "DenseProd" + DEV_TEST = "DevTest" + + +class ComputeInstanceAuthorizationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The Compute Instance Authorization type. 
Available values are personal (default).""" + + PERSONAL = "personal" + + +class ComputeInstanceState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Current state of an ComputeInstance.""" + + CREATING = "Creating" + CREATE_FAILED = "CreateFailed" + DELETING = "Deleting" + RUNNING = "Running" + RESTARTING = "Restarting" + JOB_RUNNING = "JobRunning" + SETTING_UP = "SettingUp" + SETUP_FAILED = "SetupFailed" + STARTING = "Starting" + STOPPED = "Stopped" + STOPPING = "Stopping" + USER_SETTING_UP = "UserSettingUp" + USER_SETUP_FAILED = "UserSetupFailed" + UNKNOWN = "Unknown" + UNUSABLE = "Unusable" + + +class ComputePowerAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """[Required] The compute power action.""" + + START = "Start" + STOP = "Stop" + + +class ComputeProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The provision state of the cluster. Valid values are Unknown, Updating, Provisioning, + Succeeded, and Failed. + """ + + UNKNOWN = "Unknown" + UPDATING = "Updating" + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + + +class ComputeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of compute.""" + + AKS = "AKS" + KUBERNETES = "Kubernetes" + AML_COMPUTE = "AmlCompute" + COMPUTE_INSTANCE = "ComputeInstance" + DATA_FACTORY = "DataFactory" + VIRTUAL_MACHINE = "VirtualMachine" + HD_INSIGHT = "HDInsight" + DATABRICKS = "Databricks" + DATA_LAKE_ANALYTICS = "DataLakeAnalytics" + SYNAPSE_SPARK = "SynapseSpark" + + +class ConnectionAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Authentication type of the connection target.""" + + PAT = "PAT" + MANAGED_IDENTITY = "ManagedIdentity" + USERNAME_PASSWORD = "UsernamePassword" + NONE = "None" + SAS = "SAS" + SERVICE_PRINCIPAL = "ServicePrincipal" + ACCESS_KEY = "AccessKey" + + +class ContainerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ContainerType.""" + + STORAGE_INITIALIZER = "StorageInitializer" + INFERENCE_SERVER = "InferenceServer" + + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that created the resource.""" + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + + +class CredentialsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the datastore credentials type.""" + + ACCOUNT_KEY = "AccountKey" + CERTIFICATE = "Certificate" + NONE = "None" + SAS = "Sas" + SERVICE_PRINCIPAL = "ServicePrincipal" + KERBEROS_KEYTAB = "KerberosKeytab" + KERBEROS_PASSWORD = "KerberosPassword" + + +class DatastoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the datastore contents type.""" + + AZURE_BLOB = "AzureBlob" + AZURE_DATA_LAKE_GEN1 = "AzureDataLakeGen1" + AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2" + AZURE_FILE = "AzureFile" + HDFS = "Hdfs" + + +class DataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of data.""" + + URI_FILE = "uri_file" + URI_FOLDER = "uri_folder" + MLTABLE = "mltable" + + +class DeploymentProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Possible values for DeploymentProvisioningState.""" + + CREATING = "Creating" + DELETING = "Deleting" + SCALING = "Scaling" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + + +class DiagnoseResultLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Level of workspace setup error.""" + + WARNING = "Warning" + 
ERROR = "Error" + INFORMATION = "Information" + + +class DistributionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the job distribution type.""" + + PY_TORCH = "PyTorch" + TENSOR_FLOW = "TensorFlow" + MPI = "Mpi" + + +class EarlyTerminationPolicyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """EarlyTerminationPolicyType.""" + + BANDIT = "Bandit" + MEDIAN_STOPPING = "MedianStopping" + TRUNCATION_SELECTION = "TruncationSelection" + + +class EgressPublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine whether PublicNetworkAccess is Enabled or Disabled for egress of a + deployment. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class EncryptionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates whether or not the encryption is enabled for the workspace.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class EndpointAuthMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine endpoint authentication mode.""" + + AML_TOKEN = "AMLToken" + KEY = "Key" + AAD_TOKEN = "AADToken" + + +class EndpointComputeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine endpoint compute type.""" + + MANAGED = "Managed" + KUBERNETES = "Kubernetes" + AZURE_ML_COMPUTE = "AzureMLCompute" + + +class EndpointProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of endpoint provisioning.""" + + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + UPDATING = "Updating" + CANCELED = "Canceled" + + +class EnvironmentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Environment type is either user created or curated by Azure ML service.""" + + CURATED = "Curated" + USER_CREATED = "UserCreated" + + +class EnvironmentVariableType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the Environment Variable. Possible values are: local - For local variable.""" + + LOCAL = "local" + + +class ExportFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The format of exported labels.""" + + DATASET = "Dataset" + COCO = "Coco" + CSV = "CSV" + + +class FeatureLags(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Flag for generating lags for the numeric features.""" + + #: No feature lags generated. + NONE = "None" + #: System auto-generates feature lags. + AUTO = "Auto" + + +class FeaturizationMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Featurization mode - determines data featurization mode.""" + + #: Auto mode, system performs featurization without any custom featurization inputs. + AUTO = "Auto" + #: Custom featurization. + CUSTOM = "Custom" + #: Featurization off. 'Forecasting' task cannot use this value. + OFF = "Off" + + +class ForecastHorizonMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine forecast horizon selection mode.""" + + #: Forecast horizon to be determined automatically. + AUTO = "Auto" + #: Use the custom forecast horizon. + CUSTOM = "Custom" + + +class ForecastingModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all forecasting models supported by AutoML.""" + + #: Auto-Autoregressive Integrated Moving Average (ARIMA) model uses time-series data and + #: statistical analysis to interpret the data and make future predictions. + #: This model aims to explain data by using time series data on its past values and uses linear + #: regression to make predictions. 
+ AUTO_ARIMA = "AutoArima" + #: Prophet is a procedure for forecasting time series data based on an additive model where + #: non-linear trends are fit with yearly, weekly, and daily seasonality, plus holiday effects. + #: It works best with time series that have strong seasonal effects and several seasons of + #: historical data. Prophet is robust to missing data and shifts in the trend, and typically + #: handles outliers well. + PROPHET = "Prophet" + #: The Naive forecasting model makes predictions by carrying forward the latest target value for + #: each time-series in the training data. + NAIVE = "Naive" + #: The Seasonal Naive forecasting model makes predictions by carrying forward the latest season of + #: target values for each time-series in the training data. + SEASONAL_NAIVE = "SeasonalNaive" + #: The Average forecasting model makes predictions by carrying forward the average of the target + #: values for each time-series in the training data. + AVERAGE = "Average" + #: The Seasonal Average forecasting model makes predictions by carrying forward the average value + #: of the latest season of data for each time-series in the training data. + SEASONAL_AVERAGE = "SeasonalAverage" + #: Exponential smoothing is a time series forecasting method for univariate data that can be + #: extended to support data with a systematic trend or seasonal component. + EXPONENTIAL_SMOOTHING = "ExponentialSmoothing" + #: An Autoregressive Integrated Moving Average with Explanatory Variable (ARIMAX) model can be + #: viewed as a multiple regression model with one or more autoregressive (AR) terms and/or one or + #: more moving average (MA) terms. + #: This method is suitable for forecasting when data is stationary/non stationary, and + #: multivariate with any type of data pattern, i.e., level/trend /seasonality/cyclicity. + ARIMAX = "Arimax" + #: TCNForecaster: Temporal Convolutional Networks Forecaster. //TODO: Ask forecasting team for + #: brief intro. + TCN_FORECASTER = "TCNForecaster" + #: Elastic net is a popular type of regularized linear regression that combines two popular + #: penalties, specifically the L1 and L2 penalty functions. + ELASTIC_NET = "ElasticNet" + #: The technique of transiting week learners into a strong learner is called Boosting. The + #: gradient boosting algorithm process works on this theory of execution. + GRADIENT_BOOSTING = "GradientBoosting" + #: Decision Trees are a non-parametric supervised learning method used for both classification and + #: regression tasks. + #: The goal is to create a model that predicts the value of a target variable by learning simple + #: decision rules inferred from the data features. + DECISION_TREE = "DecisionTree" + #: K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new + #: datapoints + #: which further means that the new data point will be assigned a value based on how closely it + #: matches the points in the training set. + KNN = "KNN" + #: Lasso model fit with Least Angle Regression a.k.a. Lars. It is a Linear Model trained with an + #: L1 prior as regularizer. + LASSO_LARS = "LassoLars" + #: SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning + #: applications + #: to find the model parameters that correspond to the best fit between predicted and actual + #: outputs. + #: It's an inexact but powerful technique. + SGD = "SGD" + #: Random forest is a supervised learning algorithm. 
+ #: The "forest" it builds, is an ensemble of decision trees, usually trained with the “bagging” + #: method. + #: The general idea of the bagging method is that a combination of learning models increases the + #: overall result. + RANDOM_FOREST = "RandomForest" + #: Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many + #: decision trees. It is related to the widely used random forest algorithm. + EXTREME_RANDOM_TREES = "ExtremeRandomTrees" + #: LightGBM is a gradient boosting framework that uses tree based learning algorithms. + LIGHT_GBM = "LightGBM" + #: XGBoostRegressor: Extreme Gradient Boosting Regressor is a supervised machine learning model + #: using ensemble of base learners. + XG_BOOST_REGRESSOR = "XGBoostRegressor" + + +class ForecastingPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for Forecasting task.""" + + #: The Spearman's rank coefficient of correlation is a non-parametric measure of rank correlation. + SPEARMAN_CORRELATION = "SpearmanCorrelation" + #: The Normalized Root Mean Squared Error (NRMSE) the RMSE facilitates the comparison between + #: models with different scales. + NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError" + #: The R2 score is one of the performance evaluation measures for forecasting-based machine + #: learning models. + R2_SCORE = "R2Score" + #: The Normalized Mean Absolute Error (NMAE) is a validation metric to compare the Mean Absolute + #: Error (MAE) of (time) series with different scales. + NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError" + + +class Goal(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines supported metric goals for hyperparameter tuning.""" + + MINIMIZE = "Minimize" + MAXIMIZE = "Maximize" + + +class IdentityConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine identity framework.""" + + MANAGED = "Managed" + AML_TOKEN = "AMLToken" + USER_IDENTITY = "UserIdentity" + + +class ImageAnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Annotation type of image data.""" + + CLASSIFICATION = "Classification" + BOUNDING_BOX = "BoundingBox" + INSTANCE_SEGMENTATION = "InstanceSegmentation" + + +class ImageType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the image. Possible values are: docker - For docker images. azureml - For AzureML + images. + """ + + DOCKER = "docker" + AZUREML = "azureml" + + +class InputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the input data delivery mode.""" + + READ_ONLY_MOUNT = "ReadOnlyMount" + READ_WRITE_MOUNT = "ReadWriteMount" + DOWNLOAD = "Download" + DIRECT = "Direct" + EVAL_MOUNT = "EvalMount" + EVAL_DOWNLOAD = "EvalDownload" + + +class InstanceSegmentationPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for InstanceSegmentation tasks.""" + + #: Mean Average Precision (MAP) is the average of AP (Average Precision). + #: AP is calculated for each class and averaged to get the MAP. 
+ MEAN_AVERAGE_PRECISION = "MeanAveragePrecision" + + +class JobInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the Job Input Type.""" + + LITERAL = "literal" + URI_FILE = "uri_file" + URI_FOLDER = "uri_folder" + MLTABLE = "mltable" + CUSTOM_MODEL = "custom_model" + MLFLOW_MODEL = "mlflow_model" + TRITON_MODEL = "triton_model" + + +class JobLimitsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """JobLimitsType.""" + + COMMAND = "Command" + SWEEP = "Sweep" + + +class JobOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the Job Output Type.""" + + URI_FILE = "uri_file" + URI_FOLDER = "uri_folder" + MLTABLE = "mltable" + CUSTOM_MODEL = "custom_model" + MLFLOW_MODEL = "mlflow_model" + TRITON_MODEL = "triton_model" + + +class JobProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the job provisioning state.""" + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + IN_PROGRESS = "InProgress" + + +class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of a job.""" + + #: Run hasn't started yet. + NOT_STARTED = "NotStarted" + #: Run has started. The user has a run ID. + STARTING = "Starting" + #: (Not used currently) It will be used if ES is creating the compute target. + PROVISIONING = "Provisioning" + #: The run environment is being prepared. + PREPARING = "Preparing" + #: The job is queued in the compute target. For example, in BatchAI the job is in queued state, + #: while waiting for all required nodes to be ready. + QUEUED = "Queued" + #: The job started to run in the compute target. + RUNNING = "Running" + #: Job is completed in the target. It is in output collection state now. + FINALIZING = "Finalizing" + #: Cancellation has been requested for the job. + CANCEL_REQUESTED = "CancelRequested" + #: Job completed successfully. This reflects that both the job itself and output collection states + #: completed successfully + COMPLETED = "Completed" + #: Job failed. + FAILED = "Failed" + #: Following cancellation request, the job is now successfully canceled. + CANCELED = "Canceled" + #: When heartbeat is enabled, if the run isn't updating any information to RunHistory then the run + #: goes to NotResponding state. + #: NotResponding is the only state that is exempt from strict transition orders. A run can go from + #: NotResponding to any of the previous states. + NOT_RESPONDING = "NotResponding" + #: The job is paused by users. Some adjustment to labeling jobs can be made only in paused state. + PAUSED = "Paused" + #: Default job status if not mapped to all other statuses + UNKNOWN = "Unknown" + #: The job is in a scheduled state. Job is not in any active state. + SCHEDULED = "Scheduled" + + +class JobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of job.""" + + AUTO_ML = "AutoML" + COMMAND = "Command" + LABELING = "Labeling" + SWEEP = "Sweep" + PIPELINE = "Pipeline" + SPARK = "Spark" + + +class KeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """KeyType.""" + + PRIMARY = "Primary" + SECONDARY = "Secondary" + + +class LearningRateScheduler(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Learning rate scheduler enum.""" + + #: No learning rate scheduler selected. + NONE = "None" + #: Cosine Annealing With Warmup. + WARMUP_COSINE = "WarmupCosine" + #: Step learning rate scheduler. 
+ STEP = "Step" + + +class ListViewType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ListViewType.""" + + ACTIVE_ONLY = "ActiveOnly" + ARCHIVED_ONLY = "ArchivedOnly" + ALL = "All" + + +class LoadBalancerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Load Balancer Type.""" + + PUBLIC_IP = "PublicIp" + INTERNAL_LOAD_BALANCER = "InternalLoadBalancer" + + +class LogVerbosity(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for setting log verbosity.""" + + #: No logs emitted. + NOT_SET = "NotSet" + #: Debug and above log statements logged. + DEBUG = "Debug" + #: Info and above log statements logged. + INFO = "Info" + #: Warning and above log statements logged. + WARNING = "Warning" + #: Error and above log statements logged. + ERROR = "Error" + #: Only critical statements logged. + CRITICAL = "Critical" + + +class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of managed service identity (where both SystemAssigned and UserAssigned types are + allowed). + """ + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + + +class MediaType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Media type of data asset.""" + + IMAGE = "Image" + TEXT = "Text" + + +class MLAssistConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MLAssistConfigurationType.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class MlflowAutologger(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates whether mlflow autologger is enabled for notebooks.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class MLFlowAutologgerState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the state of mlflow autologger.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class ModelSize(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Image model size.""" + + #: No value selected. + NONE = "None" + #: Small size. + SMALL = "Small" + #: Medium size. + MEDIUM = "Medium" + #: Large size. + LARGE = "Large" + #: Extra large size. + EXTRA_LARGE = "ExtraLarge" + + +class MountAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Mount Action.""" + + MOUNT = "Mount" + UNMOUNT = "Unmount" + + +class MountState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Mount state.""" + + MOUNT_REQUESTED = "MountRequested" + MOUNTED = "Mounted" + MOUNT_FAILED = "MountFailed" + UNMOUNT_REQUESTED = "UnmountRequested" + UNMOUNT_FAILED = "UnmountFailed" + UNMOUNTED = "Unmounted" + + +class NCrossValidationsMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Determines how N-Cross validations value is determined.""" + + #: Determine N-Cross validations value automatically. Supported only for 'Forecasting' AutoML + #: task. + AUTO = "Auto" + #: Use custom N-Cross validations value. + CUSTOM = "Custom" + + +class Network(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """network of this container.""" + + BRIDGE = "Bridge" + HOST = "Host" + + +class NlpLearningRateScheduler(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum of learning rate schedulers that aligns with those supported by HF.""" + + #: No learning rate schedule. + NONE = "None" + #: Linear warmup and decay. + LINEAR = "Linear" + #: Linear warmup then cosine decay. + COSINE = "Cosine" + #: Linear warmup, cosine decay, then restart to initial LR. + COSINE_WITH_RESTARTS = "CosineWithRestarts" + #: Increase linearly then polynomially decay. 
+ POLYNOMIAL = "Polynomial" + #: Constant learning rate. + CONSTANT = "Constant" + #: Linear warmup followed by constant value. + CONSTANT_WITH_WARMUP = "ConstantWithWarmup" + + +class NodeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the compute node. Values are idle, running, preparing, unusable, leaving and + preempted. + """ + + IDLE = "idle" + RUNNING = "running" + PREPARING = "preparing" + UNUSABLE = "unusable" + LEAVING = "leaving" + PREEMPTED = "preempted" + + +class NodesValueType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The enumerated types for the nodes value.""" + + ALL = "All" + CUSTOM = "Custom" + + +class ObjectDetectionPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for Image ObjectDetection task.""" + + #: Mean Average Precision (MAP) is the average of AP (Average Precision). + #: AP is calculated for each class and averaged to get the MAP. + MEAN_AVERAGE_PRECISION = "MeanAveragePrecision" + + +class OperatingSystemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of operating system.""" + + LINUX = "Linux" + WINDOWS = "Windows" + + +class OperationName(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Name of the last operation.""" + + CREATE = "Create" + START = "Start" + STOP = "Stop" + RESTART = "Restart" + REIMAGE = "Reimage" + DELETE = "Delete" + + +class OperationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Operation status.""" + + IN_PROGRESS = "InProgress" + SUCCEEDED = "Succeeded" + CREATE_FAILED = "CreateFailed" + START_FAILED = "StartFailed" + STOP_FAILED = "StopFailed" + RESTART_FAILED = "RestartFailed" + REIMAGE_FAILED = "ReimageFailed" + DELETE_FAILED = "DeleteFailed" + + +class OperationTrigger(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Trigger of operation.""" + + USER = "User" + SCHEDULE = "Schedule" + IDLE_SHUTDOWN = "IdleShutdown" + + +class OrderString(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """OrderString.""" + + CREATED_AT_DESC = "CreatedAtDesc" + CREATED_AT_ASC = "CreatedAtAsc" + UPDATED_AT_DESC = "UpdatedAtDesc" + UPDATED_AT_ASC = "UpdatedAtAsc" + + +class OsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Compute OS Type.""" + + LINUX = "Linux" + WINDOWS = "Windows" + + +class OutputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Output data delivery mode enums.""" + + READ_WRITE_MOUNT = "ReadWriteMount" + UPLOAD = "Upload" + DIRECT = "Direct" + + +class PrivateEndpointConnectionProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current provisioning state.""" + + SUCCEEDED = "Succeeded" + CREATING = "Creating" + DELETING = "Deleting" + FAILED = "Failed" + + +class PrivateEndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The private endpoint connection status.""" + + PENDING = "Pending" + APPROVED = "Approved" + REJECTED = "Rejected" + DISCONNECTED = "Disconnected" + TIMEOUT = "Timeout" + + +class Protocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Protocol over which communication will happen over this endpoint.""" + + TCP = "tcp" + UDP = "udp" + HTTP = "http" + + +class ProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current deployment state of schedule.""" + + COMPLETED = "Completed" + PROVISIONING = "Provisioning" + FAILED = "Failed" + + +class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Whether requests from Public Network are allowed.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" 
+ + +class PublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine whether PublicNetworkAccess is Enabled or Disabled.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class QuotaUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """An enum describing the unit of quota measurement.""" + + COUNT = "Count" + + +class RandomSamplingAlgorithmRule(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The specific type of random algorithm.""" + + RANDOM = "Random" + SOBOL = "Sobol" + + +class RecurrenceFrequency(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to describe the frequency of a recurrence schedule.""" + + #: Minute frequency + MINUTE = "Minute" + #: Hour frequency + HOUR = "Hour" + #: Day frequency + DAY = "Day" + #: Week frequency + WEEK = "Week" + #: Month frequency + MONTH = "Month" + + +class ReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine which reference method to use for an asset.""" + + ID = "Id" + DATA_PATH = "DataPath" + OUTPUT_PATH = "OutputPath" + + +class RegressionModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all Regression models supported by AutoML.""" + + #: Elastic net is a popular type of regularized linear regression that combines two popular + #: penalties, specifically the L1 and L2 penalty functions. + ELASTIC_NET = "ElasticNet" + #: The technique of transiting week learners into a strong learner is called Boosting. The + #: gradient boosting algorithm process works on this theory of execution. + GRADIENT_BOOSTING = "GradientBoosting" + #: Decision Trees are a non-parametric supervised learning method used for both classification and + #: regression tasks. + #: The goal is to create a model that predicts the value of a target variable by learning simple + #: decision rules inferred from the data features. + DECISION_TREE = "DecisionTree" + #: K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new + #: datapoints + #: which further means that the new data point will be assigned a value based on how closely it + #: matches the points in the training set. + KNN = "KNN" + #: Lasso model fit with Least Angle Regression a.k.a. Lars. It is a Linear Model trained with an + #: L1 prior as regularizer. + LASSO_LARS = "LassoLars" + #: SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning + #: applications + #: to find the model parameters that correspond to the best fit between predicted and actual + #: outputs. + #: It's an inexact but powerful technique. + SGD = "SGD" + #: Random forest is a supervised learning algorithm. + #: The "forest"\ it builds, is an ensemble of decision trees, usually trained with the “bagging”\ + #: method. + #: The general idea of the bagging method is that a combination of learning models increases the + #: overall result. + RANDOM_FOREST = "RandomForest" + #: Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many + #: decision trees. It is related to the widely used random forest algorithm. + EXTREME_RANDOM_TREES = "ExtremeRandomTrees" + #: LightGBM is a gradient boosting framework that uses tree based learning algorithms. + LIGHT_GBM = "LightGBM" + #: XGBoostRegressor: Extreme Gradient Boosting Regressor is a supervised machine learning model + #: using ensemble of base learners. 
+ XG_BOOST_REGRESSOR = "XGBoostRegressor" + + +class RegressionPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for Regression task.""" + + #: The Spearman's rank coefficient of correlation is a nonparametric measure of rank correlation. + SPEARMAN_CORRELATION = "SpearmanCorrelation" + #: The Normalized Root Mean Squared Error (NRMSE) the RMSE facilitates the comparison between + #: models with different scales. + NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError" + #: The R2 score is one of the performance evaluation measures for forecasting-based machine + #: learning models. + R2_SCORE = "R2Score" + #: The Normalized Mean Absolute Error (NMAE) is a validation metric to compare the Mean Absolute + #: Error (MAE) of (time) series with different scales. + NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError" + + +class RemoteLoginPortPublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh + port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is + open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed + on all nodes of the cluster if VNet is defined, else is open all public nodes. It can be + default only during cluster creation time, after creation it will be either enabled or + disabled. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + NOT_SPECIFIED = "NotSpecified" + + +class SamplingAlgorithmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SamplingAlgorithmType.""" + + GRID = "Grid" + RANDOM = "Random" + BAYESIAN = "Bayesian" + + +class ScaleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ScaleType.""" + + DEFAULT = "Default" + TARGET_UTILIZATION = "TargetUtilization" + + +class ScheduleActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ScheduleActionType.""" + + CREATE_JOB = "CreateJob" + INVOKE_BATCH_ENDPOINT = "InvokeBatchEndpoint" + + +class ScheduleListViewType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ScheduleListViewType.""" + + ENABLED_ONLY = "EnabledOnly" + DISABLED_ONLY = "DisabledOnly" + ALL = "All" + + +class ScheduleProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current deployment state of schedule.""" + + COMPLETED = "Completed" + PROVISIONING = "Provisioning" + FAILED = "Failed" + + +class ScheduleProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ScheduleProvisioningStatus.""" + + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + + +class ScheduleStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Is the schedule enabled or disabled?.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class SeasonalityMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Forecasting seasonality mode.""" + + #: Seasonality to be determined automatically. + AUTO = "Auto" + #: Use the custom seasonality value. 
+ CUSTOM = "Custom" + + +class SecretsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the datastore secrets type.""" + + ACCOUNT_KEY = "AccountKey" + CERTIFICATE = "Certificate" + SAS = "Sas" + SERVICE_PRINCIPAL = "ServicePrincipal" + KERBEROS_PASSWORD = "KerberosPassword" + KERBEROS_KEYTAB = "KerberosKeytab" + + +class ServiceDataAccessAuthIdentity(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ServiceDataAccessAuthIdentity.""" + + #: Do not use any identity for service data access. + NONE = "None" + #: Use the system assigned managed identity of the Workspace to authenticate service data access. + WORKSPACE_SYSTEM_ASSIGNED_IDENTITY = "WorkspaceSystemAssignedIdentity" + #: Use the user assigned managed identity of the Workspace to authenticate service data access. + WORKSPACE_USER_ASSIGNED_IDENTITY = "WorkspaceUserAssignedIdentity" + + +class ShortSeriesHandlingConfiguration(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The parameter defining how if AutoML should handle short time series.""" + + #: Represents no/null value. + NONE = "None" + #: Short series will be padded if there are no long series, otherwise short series will be + #: dropped. + AUTO = "Auto" + #: All the short series will be padded. + PAD = "Pad" + #: All the short series will be dropped. + DROP = "Drop" + + +class SkuScaleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Node scaling setting for the compute sku.""" + + #: Automatically scales node count. + AUTOMATIC = "Automatic" + #: Node count scaled upon user request. + MANUAL = "Manual" + #: Fixed set of nodes. + NONE = "None" + + +class SkuTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """This field is required to be implemented by the Resource Provider if the service has more than + one tier, but is not required on a PUT. + """ + + FREE = "Free" + BASIC = "Basic" + STANDARD = "Standard" + PREMIUM = "Premium" + + +class SourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Data source type.""" + + DATASET = "Dataset" + DATASTORE = "Datastore" + URI = "URI" + + +class SparkJobEntryType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SparkJobEntryType.""" + + SPARK_JOB_PYTHON_ENTRY = "SparkJobPythonEntry" + SPARK_JOB_SCALA_ENTRY = "SparkJobScalaEntry" + + +class SshPublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh + port is closed on this instance. Enabled - Indicates that the public ssh port is open and + accessible according to the VNet/subnet policy if applicable. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class SslConfigStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enable or disable ssl for scoring.""" + + DISABLED = "Disabled" + ENABLED = "Enabled" + AUTO = "Auto" + + +class StackMetaLearnerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The meta-learner is a model trained on the output of the individual heterogeneous models. + Default meta-learners are LogisticRegression for classification tasks (or LogisticRegressionCV + if cross-validation is enabled) and ElasticNet for regression/forecasting tasks (or + ElasticNetCV if cross-validation is enabled). + This parameter can be one of the following strings: LogisticRegression, LogisticRegressionCV, + LightGBMClassifier, ElasticNet, ElasticNetCV, LightGBMRegressor, or LinearRegression. + """ + + NONE = "None" + #: Default meta-learners are LogisticRegression for classification tasks. 
+ LOGISTIC_REGRESSION = "LogisticRegression" + #: Default meta-learner is LogisticRegressionCV for classification tasks when CV is on. + LOGISTIC_REGRESSION_CV = "LogisticRegressionCV" + LIGHT_GBM_CLASSIFIER = "LightGBMClassifier" + #: Default meta-learner is ElasticNet for regression and forecasting tasks. + ELASTIC_NET = "ElasticNet" + #: Default meta-learner is ElasticNetCV for regression and forecasting tasks when CV is on. + ELASTIC_NET_CV = "ElasticNetCV" + LIGHT_GBM_REGRESSOR = "LightGBMRegressor" + LINEAR_REGRESSION = "LinearRegression" + + +class Status(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status of update workspace quota.""" + + UNDEFINED = "Undefined" + SUCCESS = "Success" + FAILURE = "Failure" + INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum" + INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit" + INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName" + OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku" + OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion" + + +class StatusMessageLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """StatusMessageLevel.""" + + ERROR = "Error" + INFORMATION = "Information" + WARNING = "Warning" + + +class StochasticOptimizer(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Stochastic optimizer for image models.""" + + #: No optimizer selected. + NONE = "None" + #: Stochastic Gradient Descent optimizer. + SGD = "Sgd" + #: Adam is an algorithm that optimizes stochastic objective functions based on adaptive estimates of + #: moments. + ADAM = "Adam" + #: AdamW is a variant of the optimizer Adam that has an improved implementation of weight decay. + ADAMW = "Adamw" + + +class StorageAccountType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of this storage account.""" + + STANDARD_LRS = "Standard_LRS" + PREMIUM_LRS = "Premium_LRS" + + +class TargetAggregationFunction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Target aggregate function.""" + + #: Represents no value set. + NONE = "None" + SUM = "Sum" + MAX = "Max" + MIN = "Min" + MEAN = "Mean" + + +class TargetLagsMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Target lags selection modes.""" + + #: Target lags to be determined automatically. + AUTO = "Auto" + #: Use the custom target lags. + CUSTOM = "Custom" + + +class TargetRollingWindowSizeMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Target rolling window size mode.""" + + #: Determine rolling window size automatically. + AUTO = "Auto" + #: Use the specified rolling window size. + CUSTOM = "Custom" + + +class TaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AutoMLJob Task type.""" + + #: Classification in machine learning and statistics is a supervised learning approach in which + #: the computer program learns from the data given to it and makes new observations or + #: classifications. + CLASSIFICATION = "Classification" + #: Regression means to predict the value using the input data. Regression models are used to + #: predict a continuous value. + REGRESSION = "Regression" + #: Forecasting is a special kind of regression task that deals with time-series data and creates + #: a forecasting model + #: that can be used to predict the near future values based on the inputs. + FORECASTING = "Forecasting" + #: Image Classification. Multi-class image classification is used when an image is classified with + #: only a single label + #: from a set of classes - e.g. 
each image is classified as either an image of a 'cat' or a 'dog' + #: or a 'duck'. + IMAGE_CLASSIFICATION = "ImageClassification" + #: Image Classification Multilabel. Multi-label image classification is used when an image could + #: have one or more labels + #: from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. + IMAGE_CLASSIFICATION_MULTILABEL = "ImageClassificationMultilabel" + #: Image Object Detection. Object detection is used to identify objects in an image and locate + #: each object with a + #: bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. + IMAGE_OBJECT_DETECTION = "ImageObjectDetection" + #: Image Instance Segmentation. Instance segmentation is used to identify objects in an image at + #: the pixel level, + #: drawing a polygon around each object in the image. + IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation" + #: Text classification (also known as text tagging or text categorization) is the process of + #: sorting texts into categories. + #: Categories are mutually exclusive. + TEXT_CLASSIFICATION = "TextClassification" + #: Multilabel classification task assigns each sample to a group (zero or more) of target labels. + TEXT_CLASSIFICATION_MULTILABEL = "TextClassificationMultilabel" + #: Text Named Entity Recognition a.k.a. TextNER. + #: Named Entity Recognition (NER) is the ability to take free-form text and identify the + #: occurrences of entities such as people, locations, organizations, and more. + TEXT_NER = "TextNER" + + +class TextAnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Annotation type of text data.""" + + CLASSIFICATION = "Classification" + NAMED_ENTITY_RECOGNITION = "NamedEntityRecognition" + + +class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """TriggerType.""" + + RECURRENCE = "Recurrence" + CRON = "Cron" + + +class UnderlyingResourceAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """UnderlyingResourceAction.""" + + DELETE = "Delete" + DETACH = "Detach" + + +class UnitOfMeasure(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The unit of time measurement for the specified VM price. Example: OneHour.""" + + ONE_HOUR = "OneHour" + + +class UsageUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """An enum describing the unit of usage measurement.""" + + COUNT = "Count" + + +class UseStl(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Configure STL Decomposition of the time-series target column.""" + + #: No stl decomposition. + NONE = "None" + SEASON = "Season" + SEASON_TREND = "SeasonTrend" + + +class ValidationMetricType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Metric computation method to use for validation metrics in image tasks.""" + + #: No metric. + NONE = "None" + #: Coco metric. + COCO = "Coco" + #: Voc metric. + VOC = "Voc" + #: CocoVoc metric. 
+ COCO_VOC = "CocoVoc" + + +class ValueFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """format for the workspace connection value.""" + + JSON = "JSON" + + +class VMPriceOSType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Operating system type used by the VM.""" + + LINUX = "Linux" + WINDOWS = "Windows" + + +class VmPriority(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Virtual Machine priority.""" + + DEDICATED = "Dedicated" + LOW_PRIORITY = "LowPriority" + + +class VMTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the VM.""" + + STANDARD = "Standard" + LOW_PRIORITY = "LowPriority" + SPOT = "Spot" + + +class VolumeDefinitionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe.""" + + BIND = "bind" + VOLUME = "volume" + TMPFS = "tmpfs" + NPIPE = "npipe" + + +class WeekDay(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum of weekday.""" + + #: Monday weekday + MONDAY = "Monday" + #: Tuesday weekday + TUESDAY = "Tuesday" + #: Wednesday weekday + WEDNESDAY = "Wednesday" + #: Thursday weekday + THURSDAY = "Thursday" + #: Friday weekday + FRIDAY = "Friday" + #: Saturday weekday + SATURDAY = "Saturday" + #: Sunday weekday + SUNDAY = "Sunday" + + +class WorkspaceProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current deployment state of workspace resource. The provisioningState is to indicate states + for resource provisioning. + """ + + UNKNOWN = "Unknown" + UPDATING = "Updating" + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + SOFT_DELETED = "SoftDeleted" diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py deleted file mode 100644 index efe5686a0c99..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py +++ /dev/null @@ -1,265 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from enum import Enum, EnumMeta -from six import with_metaclass - -class _CaseInsensitiveEnumMeta(EnumMeta): - def __getitem__(self, name): - return super().__getitem__(name.upper()) - - def __getattr__(cls, name): - """Return the enum member matching `name` - We use __getattr__ instead of descriptors or inserting into the enum - class' __dict__ in order to support `name` and `value` being both - properties for enum members (which live in the class' __dict__) and - enum members themselves. - """ - try: - return cls._member_map_[name.upper()] - except KeyError: - raise AttributeError(name) - - -class AllocationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Allocation state of the compute. Possible values are: steady - Indicates that the compute is - not resizing. 
There are no changes to the number of compute nodes in the compute in progress. A - compute enters this state when it is created and when no operations are being performed on the - compute to change the number of compute nodes. resizing - Indicates that the compute is - resizing; that is, compute nodes are being added to or removed from the compute. - """ - - STEADY = "Steady" - RESIZING = "Resizing" - -class ApplicationSharingPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Policy for sharing applications on this compute instance among users of parent workspace. If - Personal, only the creator can access applications on this compute instance. When Shared, any - workspace user can access applications on this instance depending on his/her assigned role. - """ - - PERSONAL = "Personal" - SHARED = "Shared" - -class BillingCurrency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Three lettered code specifying the currency of the VM price. Example: USD - """ - - USD = "USD" - -class ComputeInstanceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Current state of a ComputeInstance. - """ - - CREATING = "Creating" - CREATE_FAILED = "CreateFailed" - DELETING = "Deleting" - RUNNING = "Running" - RESTARTING = "Restarting" - JOB_RUNNING = "JobRunning" - SETTING_UP = "SettingUp" - SETUP_FAILED = "SetupFailed" - STARTING = "Starting" - STOPPED = "Stopped" - STOPPING = "Stopping" - USER_SETTING_UP = "UserSettingUp" - USER_SETUP_FAILED = "UserSetupFailed" - UNKNOWN = "Unknown" - UNUSABLE = "Unusable" - -class ComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The type of compute - """ - - AKS = "AKS" - AML_COMPUTE = "AmlCompute" - COMPUTE_INSTANCE = "ComputeInstance" - DATA_FACTORY = "DataFactory" - VIRTUAL_MACHINE = "VirtualMachine" - HD_INSIGHT = "HDInsight" - DATABRICKS = "Databricks" - DATA_LAKE_ANALYTICS = "DataLakeAnalytics" - -class EncryptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Indicates whether or not the encryption is enabled for the workspace. - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class NodeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """State of the compute node. Values are idle, running, preparing, unusable, leaving and - preempted. - """ - - IDLE = "idle" - RUNNING = "running" - PREPARING = "preparing" - UNUSABLE = "unusable" - LEAVING = "leaving" - PREEMPTED = "preempted" - -class OperationName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Name of the last operation. - """ - - CREATE = "Create" - START = "Start" - STOP = "Stop" - RESTART = "Restart" - REIMAGE = "Reimage" - DELETE = "Delete" - -class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Operation status. - """ - - IN_PROGRESS = "InProgress" - SUCCEEDED = "Succeeded" - CREATE_FAILED = "CreateFailed" - START_FAILED = "StartFailed" - STOP_FAILED = "StopFailed" - RESTART_FAILED = "RestartFailed" - REIMAGE_FAILED = "ReimageFailed" - DELETE_FAILED = "DeleteFailed" - -class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The current provisioning state. - """ - - SUCCEEDED = "Succeeded" - CREATING = "Creating" - DELETING = "Deleting" - FAILED = "Failed" - -class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The private endpoint connection status. 
- """ - - PENDING = "Pending" - APPROVED = "Approved" - REJECTED = "Rejected" - DISCONNECTED = "Disconnected" - TIMEOUT = "Timeout" - -class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The current deployment state of workspace resource. The provisioningState is to indicate states - for resource provisioning. - """ - - UNKNOWN = "Unknown" - UPDATING = "Updating" - CREATING = "Creating" - DELETING = "Deleting" - SUCCEEDED = "Succeeded" - FAILED = "Failed" - CANCELED = "Canceled" - -class QuotaUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """An enum describing the unit of quota measurement. - """ - - COUNT = "Count" - -class ReasonCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The reason for the restriction. - """ - - NOT_SPECIFIED = "NotSpecified" - NOT_AVAILABLE_FOR_REGION = "NotAvailableForRegion" - NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription" - -class RemoteLoginPortPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh - port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is - open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed - on all nodes of the cluster if VNet is defined, else is open all public nodes. It can be - default only during cluster creation time, after creation it will be either enabled or - disabled. - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - NOT_SPECIFIED = "NotSpecified" - -class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The identity type. - """ - - SYSTEM_ASSIGNED = "SystemAssigned" - USER_ASSIGNED = "UserAssigned" - SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" - NONE = "None" - -class SshPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh - port is closed on this instance. Enabled - Indicates that the public ssh port is open and - accessible according to the VNet/subnet policy if applicable. - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class SslConfigurationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Enable or disable ssl for scoring - """ - - DISABLED = "Disabled" - ENABLED = "Enabled" - -class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Status of update workspace quota. - """ - - UNDEFINED = "Undefined" - SUCCESS = "Success" - FAILURE = "Failure" - INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum" - INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit" - INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName" - OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku" - OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion" - -class UnderlyingResourceAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - - DELETE = "Delete" - DETACH = "Detach" - -class UnitOfMeasure(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The unit of time measurement for the specified VM price. Example: OneHour - """ - - ONE_HOUR = "OneHour" - -class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """An enum describing the unit of usage measurement. - """ - - COUNT = "Count" - -class VMPriceOSType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Operating system type used by the VM. 
- """ - - LINUX = "Linux" - WINDOWS = "Windows" - -class VmPriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Virtual Machine priority - """ - - DEDICATED = "Dedicated" - LOW_PRIORITY = "LowPriority" - -class VMTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The type of the VM. - """ - - STANDARD = "Standard" - LOW_PRIORITY = "LowPriority" - SPOT = "Spot" diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py deleted file mode 100644 index d95b81b6554e..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py +++ /dev/null @@ -1,3667 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.exceptions import HttpResponseError -import msrest.serialization - - -class Compute(msrest.serialization.Model): - """Machine Learning compute object. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, VirtualMachine. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. 
- :vartype is_attached_compute: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - } - - _subtype_map = { - 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HDInsight', 'VirtualMachine': 'VirtualMachine'} - } - - def __init__( - self, - **kwargs - ): - super(Compute, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - - -class AKS(Compute): - """A Machine Learning compute based on AKS. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: AKS properties. 
- :type properties: ~azure.mgmt.machinelearningservices.models.AKSProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'AKSProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(AKS, self).__init__(**kwargs) - self.compute_type = 'AKS' # type: str - self.properties = kwargs.get('properties', None) - - -class ComputeSecrets(msrest.serialization.Model): - """Secrets related to a Machine Learning compute. Might differ for every type of compute. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'} - } - - def __init__( - self, - **kwargs - ): - super(ComputeSecrets, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - - -class AksComputeSecrets(ComputeSecrets): - """Secrets related to a Machine Learning compute based on AKS. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param user_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :type user_kube_config: str - :param admin_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :type admin_kube_config: str - :param image_pull_secret_name: Image registry pull secret. 
- :type image_pull_secret_name: str - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, - 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, - 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AksComputeSecrets, self).__init__(**kwargs) - self.compute_type = 'AKS' # type: str - self.user_kube_config = kwargs.get('user_kube_config', None) - self.admin_kube_config = kwargs.get('admin_kube_config', None) - self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None) - - -class AksNetworkingConfiguration(msrest.serialization.Model): - """Advance configuration for AKS networking. - - :param subnet_id: Virtual network subnet resource ID the compute nodes belong to. - :type subnet_id: str - :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must - not overlap with any Subnet IP ranges. - :type service_cidr: str - :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within - the Kubernetes service address range specified in serviceCidr. - :type dns_service_ip: str - :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It - must not overlap with any Subnet IP ranges or the Kubernetes service address range. - :type docker_bridge_cidr: str - """ - - _validation = { - 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, - 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'}, - 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, - } - - _attribute_map = { - 'subnet_id': {'key': 'subnetId', 'type': 'str'}, - 'service_cidr': {'key': 'serviceCidr', 'type': 'str'}, - 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'}, - 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AksNetworkingConfiguration, self).__init__(**kwargs) - self.subnet_id = kwargs.get('subnet_id', None) - self.service_cidr = kwargs.get('service_cidr', None) - self.dns_service_ip = kwargs.get('dns_service_ip', None) - self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None) - - -class AKSProperties(msrest.serialization.Model): - """AKS properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param cluster_fqdn: Cluster full qualified domain name. - :type cluster_fqdn: str - :ivar system_services: System services. - :vartype system_services: list[~azure.mgmt.machinelearningservices.models.SystemService] - :param agent_count: Number of agents. - :type agent_count: int - :param agent_vm_size: Agent virtual machine size. - :type agent_vm_size: str - :param ssl_configuration: SSL configuration. - :type ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration - :param aks_networking_configuration: AKS networking configuration for vnet. 
- :type aks_networking_configuration: - ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration - """ - - _validation = { - 'system_services': {'readonly': True}, - 'agent_count': {'minimum': 1}, - } - - _attribute_map = { - 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, - 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, - 'agent_count': {'key': 'agentCount', 'type': 'int'}, - 'agent_vm_size': {'key': 'agentVMSize', 'type': 'str'}, - 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'}, - 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - super(AKSProperties, self).__init__(**kwargs) - self.cluster_fqdn = kwargs.get('cluster_fqdn', None) - self.system_services = None - self.agent_count = kwargs.get('agent_count', None) - self.agent_vm_size = kwargs.get('agent_vm_size', None) - self.ssl_configuration = kwargs.get('ssl_configuration', None) - self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None) - - -class AmlCompute(Compute): - """An Azure Machine Learning compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: AML Compute properties. 
- :type properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlCompute, self).__init__(**kwargs) - self.compute_type = 'AmlCompute' # type: str - self.properties = kwargs.get('properties', None) - - -class AmlComputeNodeInformation(msrest.serialization.Model): - """Compute node information related to a AmlCompute. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar node_id: ID of the compute node. - :vartype node_id: str - :ivar private_ip_address: Private IP address of the compute node. - :vartype private_ip_address: str - :ivar public_ip_address: Public IP address of the compute node. - :vartype public_ip_address: str - :ivar port: SSH port number of the node. - :vartype port: int - :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable, - leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable", - "leaving", "preempted". - :vartype node_state: str or ~azure.mgmt.machinelearningservices.models.NodeState - :ivar run_id: ID of the Experiment running on the node, if any else null. - :vartype run_id: str - """ - - _validation = { - 'node_id': {'readonly': True}, - 'private_ip_address': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'port': {'readonly': True}, - 'node_state': {'readonly': True}, - 'run_id': {'readonly': True}, - } - - _attribute_map = { - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'port': {'key': 'port', 'type': 'int'}, - 'node_state': {'key': 'nodeState', 'type': 'str'}, - 'run_id': {'key': 'runId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlComputeNodeInformation, self).__init__(**kwargs) - self.node_id = None - self.private_ip_address = None - self.public_ip_address = None - self.port = None - self.node_state = None - self.run_id = None - - -class ComputeNodesInformation(msrest.serialization.Model): - """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlComputeNodesInformation. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. 
The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar next_link: The continuation token. - :vartype next_link: str - """ - - _validation = { - 'compute_type': {'required': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'} - } - - def __init__( - self, - **kwargs - ): - super(ComputeNodesInformation, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - self.next_link = None - - -class AmlComputeNodesInformation(ComputeNodesInformation): - """Compute node information related to a AmlCompute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar next_link: The continuation token. - :vartype next_link: str - :ivar nodes: The collection of returned AmlCompute nodes details. - :vartype nodes: list[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] - """ - - _validation = { - 'compute_type': {'required': True}, - 'next_link': {'readonly': True}, - 'nodes': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlComputeNodesInformation, self).__init__(**kwargs) - self.compute_type = 'AmlCompute' # type: str - self.nodes = None - - -class AmlComputeProperties(msrest.serialization.Model): - """AML Compute properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param vm_size: Virtual Machine Size. - :type vm_size: str - :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated", - "LowPriority". - :type vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority - :param scale_settings: Scale settings for AML Compute. - :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - :param user_account_credentials: Credentials for an administrator user account that will be - created on each compute node. - :type user_account_credentials: - ~azure.mgmt.machinelearningservices.models.UserAccountCredentials - :param subnet: Virtual network subnet resource ID the compute nodes belong to. - :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :param remote_login_port_public_access: State of the public SSH port. Possible values are: - Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - - Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - - Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, - else is open all public nodes. 
It can be default only during cluster creation time, after - creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", - "NotSpecified". Default value: "NotSpecified". - :type remote_login_port_public_access: str or - ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess - :ivar allocation_state: Allocation state of the compute. Possible values are: steady - - Indicates that the compute is not resizing. There are no changes to the number of compute nodes - in the compute in progress. A compute enters this state when it is created and when no - operations are being performed on the compute to change the number of compute nodes. resizing - - Indicates that the compute is resizing; that is, compute nodes are being added to or removed - from the compute. Possible values include: "Steady", "Resizing". - :vartype allocation_state: str or ~azure.mgmt.machinelearningservices.models.AllocationState - :ivar allocation_state_transition_time: The time at which the compute entered its current - allocation state. - :vartype allocation_state_transition_time: ~datetime.datetime - :ivar errors: Collection of errors encountered by various compute nodes during node setup. - :vartype errors: list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar current_node_count: The number of compute nodes currently assigned to the compute. - :vartype current_node_count: int - :ivar target_node_count: The target number of compute nodes for the compute. If the - allocationState is resizing, this property denotes the target node count for the ongoing resize - operation. If the allocationState is steady, this property denotes the target node count for - the previous resize operation. - :vartype target_node_count: int - :ivar node_state_counts: Counts of various node states on the compute. 
- :vartype node_state_counts: ~azure.mgmt.machinelearningservices.models.NodeStateCounts - """ - - _validation = { - 'allocation_state': {'readonly': True}, - 'allocation_state_transition_time': {'readonly': True}, - 'errors': {'readonly': True}, - 'current_node_count': {'readonly': True}, - 'target_node_count': {'readonly': True}, - 'node_state_counts': {'readonly': True}, - } - - _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, - 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, - 'allocation_state': {'key': 'allocationState', 'type': 'str'}, - 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, - 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, - 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, - 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlComputeProperties, self).__init__(**kwargs) - self.vm_size = kwargs.get('vm_size', None) - self.vm_priority = kwargs.get('vm_priority', None) - self.scale_settings = kwargs.get('scale_settings', None) - self.user_account_credentials = kwargs.get('user_account_credentials', None) - self.subnet = kwargs.get('subnet', None) - self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified") - self.allocation_state = None - self.allocation_state_transition_time = None - self.errors = None - self.current_node_count = None - self.target_node_count = None - self.node_state_counts = None - - -class AmlUserFeature(msrest.serialization.Model): - """Features enabled for a workspace. - - :param id: Specifies the feature ID. - :type id: str - :param display_name: Specifies the feature name. - :type display_name: str - :param description: Describes the feature for user experience. - :type description: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlUserFeature, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.display_name = kwargs.get('display_name', None) - self.description = kwargs.get('description', None) - - -class ClusterUpdateParameters(msrest.serialization.Model): - """AmlCompute update parameters. - - :param scale_settings: Desired scale settings for the amlCompute. - :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - """ - - _attribute_map = { - 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(ClusterUpdateParameters, self).__init__(**kwargs) - self.scale_settings = kwargs.get('scale_settings', None) - - -class ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model): - """ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar principal_id: The principal id of user assigned identity. - :vartype principal_id: str - :ivar client_id: The client id of user assigned identity. - :vartype client_id: str - """ - - _validation = { - 'principal_id': {'readonly': True}, - 'client_id': {'readonly': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs) - self.principal_id = None - self.client_id = None - - -class ComputeInstance(Compute): - """An Azure Machine Learning compute instance. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: Compute Instance properties. 
- :type properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstance, self).__init__(**kwargs) - self.compute_type = 'ComputeInstance' # type: str - self.properties = kwargs.get('properties', None) - - -class ComputeInstanceApplication(msrest.serialization.Model): - """Defines an Aml Instance application and its connectivity endpoint URI. - - :param display_name: Name of the ComputeInstance application. - :type display_name: str - :param endpoint_uri: Application' endpoint URI. - :type endpoint_uri: str - """ - - _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceApplication, self).__init__(**kwargs) - self.display_name = kwargs.get('display_name', None) - self.endpoint_uri = kwargs.get('endpoint_uri', None) - - -class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): - """Defines all connectivity endpoints and properties for a ComputeInstance. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar public_ip_address: Public IP Address of this ComputeInstance. - :vartype public_ip_address: str - :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in - which the compute instance is deployed). - :vartype private_ip_address: str - """ - - _validation = { - 'public_ip_address': {'readonly': True}, - 'private_ip_address': {'readonly': True}, - } - - _attribute_map = { - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) - self.public_ip_address = None - self.private_ip_address = None - - -class ComputeInstanceCreatedBy(msrest.serialization.Model): - """Describes information on user who created this ComputeInstance. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar user_name: Name of the user. - :vartype user_name: str - :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization. - :vartype user_org_id: str - :ivar user_id: Uniquely identifies the user within his/her organization. 
- :vartype user_id: str - """ - - _validation = { - 'user_name': {'readonly': True}, - 'user_org_id': {'readonly': True}, - 'user_id': {'readonly': True}, - } - - _attribute_map = { - 'user_name': {'key': 'userName', 'type': 'str'}, - 'user_org_id': {'key': 'userOrgId', 'type': 'str'}, - 'user_id': {'key': 'userId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceCreatedBy, self).__init__(**kwargs) - self.user_name = None - self.user_org_id = None - self.user_id = None - - -class ComputeInstanceLastOperation(msrest.serialization.Model): - """The last operation on ComputeInstance. - - :param operation_name: Name of the last operation. Possible values include: "Create", "Start", - "Stop", "Restart", "Reimage", "Delete". - :type operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName - :param operation_time: Time of the last operation. - :type operation_time: ~datetime.datetime - :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded", - "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed". - :type operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus - """ - - _attribute_map = { - 'operation_name': {'key': 'operationName', 'type': 'str'}, - 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'}, - 'operation_status': {'key': 'operationStatus', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceLastOperation, self).__init__(**kwargs) - self.operation_name = kwargs.get('operation_name', None) - self.operation_time = kwargs.get('operation_time', None) - self.operation_status = kwargs.get('operation_status', None) - - -class ComputeInstanceProperties(msrest.serialization.Model): - """Compute Instance properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param vm_size: Virtual Machine Size. - :type vm_size: str - :param subnet: Virtual network subnet resource ID the compute nodes belong to. - :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :param application_sharing_policy: Policy for sharing applications on this compute instance - among users of parent workspace. If Personal, only the creator can access applications on this - compute instance. When Shared, any workspace user can access applications on this instance - depending on his/her assigned role. Possible values include: "Personal", "Shared". Default - value: "Shared". - :type application_sharing_policy: str or - ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy - :param ssh_settings: Specifies policy and settings for SSH access. - :type ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings - :ivar connectivity_endpoints: Describes all connectivity endpoints available for this - ComputeInstance. - :vartype connectivity_endpoints: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceConnectivityEndpoints - :ivar applications: Describes available applications and their endpoints on this - ComputeInstance. - :vartype applications: - list[~azure.mgmt.machinelearningservices.models.ComputeInstanceApplication] - :ivar created_by: Describes information on user who created this ComputeInstance. - :vartype created_by: ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy - :ivar errors: Collection of errors encountered on this ComputeInstance. 
- :vartype errors: list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar state: The current state of this ComputeInstance. Possible values include: "Creating", - "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed", - "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable". - :vartype state: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceState - :ivar last_operation: The last operation on ComputeInstance. - :vartype last_operation: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation - """ - - _validation = { - 'connectivity_endpoints': {'readonly': True}, - 'applications': {'readonly': True}, - 'created_by': {'readonly': True}, - 'errors': {'readonly': True}, - 'state': {'readonly': True}, - 'last_operation': {'readonly': True}, - } - - _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'}, - 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'}, - 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, - 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'}, - 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, - 'state': {'key': 'state', 'type': 'str'}, - 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceProperties, self).__init__(**kwargs) - self.vm_size = kwargs.get('vm_size', None) - self.subnet = kwargs.get('subnet', None) - self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared") - self.ssh_settings = kwargs.get('ssh_settings', None) - self.connectivity_endpoints = None - self.applications = None - self.created_by = None - self.errors = None - self.state = None - self.last_operation = None - - -class ComputeInstanceSshSettings(msrest.serialization.Model): - """Specifies policy and settings for SSH access. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param ssh_public_access: State of the public SSH port. Possible values are: Disabled - - Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the - public ssh port is open and accessible according to the VNet/subnet policy if applicable. - Possible values include: "Enabled", "Disabled". Default value: "Disabled". - :type ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess - :ivar admin_user_name: Describes the admin user name. - :vartype admin_user_name: str - :ivar ssh_port: Describes the port for connecting through SSH. - :vartype ssh_port: int - :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t - rsa -b 2048" to generate your SSH key pairs. 
- :type admin_public_key: str - """ - - _validation = { - 'admin_user_name': {'readonly': True}, - 'ssh_port': {'readonly': True}, - } - - _attribute_map = { - 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceSshSettings, self).__init__(**kwargs) - self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled") - self.admin_user_name = None - self.ssh_port = None - self.admin_public_key = kwargs.get('admin_public_key', None) - - -class Resource(msrest.serialization.Model): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - } - - def __init__( - self, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.identity = kwargs.get('identity', None) - self.location = kwargs.get('location', None) - self.type = None - self.tags = kwargs.get('tags', None) - self.sku = kwargs.get('sku', None) - - -class ComputeResource(Resource): - """Machine Learning compute object wrapped into ARM resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :param properties: Compute properties. 
- :type properties: ~azure.mgmt.machinelearningservices.models.Compute - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'properties': {'key': 'properties', 'type': 'Compute'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class Databricks(Compute): - """A DataFactory compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. 
- :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(Databricks, self).__init__(**kwargs) - self.compute_type = 'Databricks' # type: str - self.properties = kwargs.get('properties', None) - - -class DatabricksComputeSecrets(ComputeSecrets): - """Secrets related to a Machine Learning compute based on Databricks. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param databricks_access_token: access token for databricks account. - :type databricks_access_token: str - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DatabricksComputeSecrets, self).__init__(**kwargs) - self.compute_type = 'Databricks' # type: str - self.databricks_access_token = kwargs.get('databricks_access_token', None) - - -class DatabricksProperties(msrest.serialization.Model): - """DatabricksProperties. - - :param databricks_access_token: Databricks access token. - :type databricks_access_token: str - """ - - _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DatabricksProperties, self).__init__(**kwargs) - self.databricks_access_token = kwargs.get('databricks_access_token', None) - - -class DataFactory(Compute): - """A DataFactory compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. 
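# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# Attaching an existing Databricks workspace with the removed kwargs-based models:
# DatabricksProperties carries the access token, Databricks fixes compute_type to
# 'Databricks', and ComputeResource is the ARM envelope defined above. The
# resource id and token values are placeholders.
from azure.mgmt.machinelearningservices.models import (
    ComputeResource,
    Databricks,
    DatabricksProperties,
)

databricks_compute = Databricks(
    description="Attached Databricks workspace",
    resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Databricks/workspaces/<name>",
    properties=DatabricksProperties(databricks_access_token="<dapi-token>"),
)
compute_envelope = ComputeResource(location="eastus", properties=databricks_compute)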
Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFactory, self).__init__(**kwargs) - self.compute_type = 'DataFactory' # type: str - - -class DataLakeAnalytics(Compute): - """A DataLakeAnalytics compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. 
- :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(DataLakeAnalytics, self).__init__(**kwargs) - self.compute_type = 'DataLakeAnalytics' # type: str - self.properties = kwargs.get('properties', None) - - -class DataLakeAnalyticsProperties(msrest.serialization.Model): - """DataLakeAnalyticsProperties. - - :param data_lake_store_account_name: DataLake Store Account Name. - :type data_lake_store_account_name: str - """ - - _attribute_map = { - 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataLakeAnalyticsProperties, self).__init__(**kwargs) - self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None) - - -class EncryptionProperty(msrest.serialization.Model): - """EncryptionProperty. - - All required parameters must be populated in order to send to Azure. - - :param status: Required. Indicates whether or not the encryption is enabled for the workspace. - Possible values include: "Enabled", "Disabled". - :type status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus - :param key_vault_properties: Required. Customer Key vault properties. - :type key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties - """ - - _validation = { - 'status': {'required': True}, - 'key_vault_properties': {'required': True}, - } - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(EncryptionProperty, self).__init__(**kwargs) - self.status = kwargs['status'] - self.key_vault_properties = kwargs['key_vault_properties'] - - -class ErrorDetail(msrest.serialization.Model): - """Error detail information. - - All required parameters must be populated in order to send to Azure. - - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. 
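# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# DataLakeAnalytics follows the same attach pattern as the other Compute subtypes:
# a small *Properties model plus the ARM resource id of the underlying account.
# Both values below are placeholders.
from azure.mgmt.machinelearningservices.models import (
    DataLakeAnalytics,
    DataLakeAnalyticsProperties,
)

adla_compute = DataLakeAnalytics(
    resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.DataLakeAnalytics/accounts/<name>",
    properties=DataLakeAnalyticsProperties(data_lake_store_account_name="<adls-account>"),
)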
- :type message: str - """ - - _validation = { - 'code': {'required': True}, - 'message': {'required': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorDetail, self).__init__(**kwargs) - self.code = kwargs['code'] - self.message = kwargs['message'] - - -class ErrorResponse(msrest.serialization.Model): - """Error response information. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: Error code. - :vartype code: str - :ivar message: Error message. - :vartype message: str - :ivar details: An array of error detail objects. - :vartype details: list[~azure.mgmt.machinelearningservices.models.ErrorDetail] - """ - - _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'details': {'readonly': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorResponse, self).__init__(**kwargs) - self.code = None - self.message = None - self.details = None - - -class EstimatedVMPrice(msrest.serialization.Model): - """The estimated price info for using a VM of a particular OS type, tier, etc. - - All required parameters must be populated in order to send to Azure. - - :param retail_price: Required. The price charged for using the VM. - :type retail_price: float - :param os_type: Required. Operating system type used by the VM. Possible values include: - "Linux", "Windows". - :type os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType - :param vm_tier: Required. The type of the VM. Possible values include: "Standard", - "LowPriority", "Spot". - :type vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier - """ - - _validation = { - 'retail_price': {'required': True}, - 'os_type': {'required': True}, - 'vm_tier': {'required': True}, - } - - _attribute_map = { - 'retail_price': {'key': 'retailPrice', 'type': 'float'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_tier': {'key': 'vmTier', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(EstimatedVMPrice, self).__init__(**kwargs) - self.retail_price = kwargs['retail_price'] - self.os_type = kwargs['os_type'] - self.vm_tier = kwargs['vm_tier'] - - -class EstimatedVMPrices(msrest.serialization.Model): - """The estimated price info for using a VM. - - All required parameters must be populated in order to send to Azure. - - :param billing_currency: Required. Three lettered code specifying the currency of the VM price. - Example: USD. Possible values include: "USD". - :type billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency - :param unit_of_measure: Required. The unit of time measurement for the specified VM price. - Example: OneHour. Possible values include: "OneHour". - :type unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure - :param values: Required. The list of estimated prices for using a VM of a particular OS type, - tier, etc. 
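# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# Models whose _validation marks a field as required read it with kwargs['name']
# in __init__, so omitting it fails immediately with a KeyError rather than at
# serialization time. The example values are placeholders.
from azure.mgmt.machinelearningservices.models import ErrorDetail, EstimatedVMPrice

detail = ErrorDetail(code="QuotaExceeded", message="Requested cores exceed the limit.")
price = EstimatedVMPrice(retail_price=0.45, os_type="Linux", vm_tier="LowPriority")

try:
    ErrorDetail(code="MissingMessage")      # 'message' is required
except KeyError as missing:
    print(f"missing required parameter: {missing}")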
- :type values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] - """ - - _validation = { - 'billing_currency': {'required': True}, - 'unit_of_measure': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'billing_currency': {'key': 'billingCurrency', 'type': 'str'}, - 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[EstimatedVMPrice]'}, - } - - def __init__( - self, - **kwargs - ): - super(EstimatedVMPrices, self).__init__(**kwargs) - self.billing_currency = kwargs['billing_currency'] - self.unit_of_measure = kwargs['unit_of_measure'] - self.values = kwargs['values'] - - -class HDInsight(Compute): - """A HDInsight compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. 
- :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(HDInsight, self).__init__(**kwargs) - self.compute_type = 'HDInsight' # type: str - self.properties = kwargs.get('properties', None) - - -class HDInsightProperties(msrest.serialization.Model): - """HDInsightProperties. - - :param ssh_port: Port open for ssh connections on the master node of the cluster. - :type ssh_port: int - :param address: Public IP address of the master node of the cluster. - :type address: str - :param administrator_account: Admin credentials for master node of the cluster. - :type administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - - _attribute_map = { - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - } - - def __init__( - self, - **kwargs - ): - super(HDInsightProperties, self).__init__(**kwargs) - self.ssh_port = kwargs.get('ssh_port', None) - self.address = kwargs.get('address', None) - self.administrator_account = kwargs.get('administrator_account', None) - - -class Identity(msrest.serialization.Model): - """Identity for the resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar principal_id: The principal ID of resource identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of resource. - :vartype tenant_id: str - :param type: Required. The identity type. Possible values include: "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned", "None". - :type type: str or ~azure.mgmt.machinelearningservices.models.ResourceIdentityType - :param user_assigned_identities: The list of user identities associated with resource. The user - identity dictionary key references will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. 
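# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# Attaching an HDInsight cluster head node. administrator_account would take a
# VirtualMachineSshCredentials instance (defined elsewhere in this models module,
# per the docstring above); it is omitted here to keep the sketch to the classes
# shown. The resource id and address are placeholders.
from azure.mgmt.machinelearningservices.models import HDInsight, HDInsightProperties

hdi_compute = HDInsight(
    resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.HDInsight/clusters/<name>",
    properties=HDInsightProperties(ssh_port=22, address="10.0.0.4"),
)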
- :type user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] - """ - - _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, - } - - def __init__( - self, - **kwargs - ): - super(Identity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - self.type = kwargs['type'] - self.user_assigned_identities = kwargs.get('user_assigned_identities', None) - - -class KeyVaultProperties(msrest.serialization.Model): - """KeyVaultProperties. - - All required parameters must be populated in order to send to Azure. - - :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned - encryption key is present. - :type key_vault_arm_id: str - :param key_identifier: Required. Key vault uri to access the encryption key. - :type key_identifier: str - :param identity_client_id: For future use - The client id of the identity which will be used to - access key vault. - :type identity_client_id: str - """ - - _validation = { - 'key_vault_arm_id': {'required': True}, - 'key_identifier': {'required': True}, - } - - _attribute_map = { - 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, - 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(KeyVaultProperties, self).__init__(**kwargs) - self.key_vault_arm_id = kwargs['key_vault_arm_id'] - self.key_identifier = kwargs['key_identifier'] - self.identity_client_id = kwargs.get('identity_client_id', None) - - -class ListAmlUserFeatureResult(msrest.serialization.Model): - """The List Aml user feature operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of AML user facing features. - :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlUserFeature] - :ivar next_link: The URI to fetch the next page of AML user features information. Call - ListNext() with this to fetch the next page of AML user features information. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ListAmlUserFeatureResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class ListUsagesResult(msrest.serialization.Model): - """The List Usages operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of AML resource usages. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Usage] - :ivar next_link: The URI to fetch the next page of AML resource usage information. Call - ListNext() with this to fetch the next page of AML resource usage information. 
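# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# Workspace-level settings built from the models above: a system-assigned identity
# plus customer-managed-key encryption. Both EncryptionProperty fields are
# required; the Key Vault ids are placeholders.
from azure.mgmt.machinelearningservices.models import (
    EncryptionProperty,
    Identity,
    KeyVaultProperties,
)

identity = Identity(type="SystemAssigned")
encryption = EncryptionProperty(
    status="Enabled",
    key_vault_properties=KeyVaultProperties(
        key_vault_arm_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<kv>",
        key_identifier="https://<kv>.vault.azure.net/keys/<key>/<version>",
    ),
)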
- :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Usage]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ListUsagesResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class ListWorkspaceKeysResult(msrest.serialization.Model): - """ListWorkspaceKeysResult. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar user_storage_key: - :vartype user_storage_key: str - :ivar user_storage_resource_id: - :vartype user_storage_resource_id: str - :ivar app_insights_instrumentation_key: - :vartype app_insights_instrumentation_key: str - :ivar container_registry_credentials: - :vartype container_registry_credentials: - ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult - :param notebook_access_keys: - :type notebook_access_keys: - ~azure.mgmt.machinelearningservices.models.NotebookListCredentialsResult - """ - - _validation = { - 'user_storage_key': {'readonly': True}, - 'user_storage_resource_id': {'readonly': True}, - 'app_insights_instrumentation_key': {'readonly': True}, - 'container_registry_credentials': {'readonly': True}, - } - - _attribute_map = { - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, - 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, - 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, - 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, - 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'NotebookListCredentialsResult'}, - } - - def __init__( - self, - **kwargs - ): - super(ListWorkspaceKeysResult, self).__init__(**kwargs) - self.user_storage_key = None - self.user_storage_resource_id = None - self.app_insights_instrumentation_key = None - self.container_registry_credentials = None - self.notebook_access_keys = kwargs.get('notebook_access_keys', None) - - -class ListWorkspaceQuotas(msrest.serialization.Model): - """The List WorkspaceQuotasByVMFamily operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of Workspace Quotas by VM Family. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ResourceQuota] - :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. - Call ListNext() with this to fetch the next page of Workspace Quota information. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ResourceQuota]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ListWorkspaceQuotas, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class MachineLearningServiceError(msrest.serialization.Model): - """Wrapper for error response to follow ARM guidelines. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar error: The error response. 
- :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse - """ - - _validation = { - 'error': {'readonly': True}, - } - - _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorResponse'}, - } - - def __init__( - self, - **kwargs - ): - super(MachineLearningServiceError, self).__init__(**kwargs) - self.error = None - - -class NodeStateCounts(msrest.serialization.Model): - """Counts of various compute node states on the amlCompute. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar idle_node_count: Number of compute nodes in idle state. - :vartype idle_node_count: int - :ivar running_node_count: Number of compute nodes which are running jobs. - :vartype running_node_count: int - :ivar preparing_node_count: Number of compute nodes which are being prepared. - :vartype preparing_node_count: int - :ivar unusable_node_count: Number of compute nodes which are in unusable state. - :vartype unusable_node_count: int - :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. - :vartype leaving_node_count: int - :ivar preempted_node_count: Number of compute nodes which are in preempted state. - :vartype preempted_node_count: int - """ - - _validation = { - 'idle_node_count': {'readonly': True}, - 'running_node_count': {'readonly': True}, - 'preparing_node_count': {'readonly': True}, - 'unusable_node_count': {'readonly': True}, - 'leaving_node_count': {'readonly': True}, - 'preempted_node_count': {'readonly': True}, - } - - _attribute_map = { - 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, - 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, - 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, - 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, - 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, - 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(NodeStateCounts, self).__init__(**kwargs) - self.idle_node_count = None - self.running_node_count = None - self.preparing_node_count = None - self.unusable_node_count = None - self.leaving_node_count = None - self.preempted_node_count = None - - -class NotebookListCredentialsResult(msrest.serialization.Model): - """NotebookListCredentialsResult. - - :param primary_access_key: - :type primary_access_key: str - :param secondary_access_key: - :type secondary_access_key: str - """ - - _attribute_map = { - 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, - 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(NotebookListCredentialsResult, self).__init__(**kwargs) - self.primary_access_key = kwargs.get('primary_access_key', None) - self.secondary_access_key = kwargs.get('secondary_access_key', None) - - -class NotebookPreparationError(msrest.serialization.Model): - """NotebookPreparationError. 
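# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# provisioning_errors on a Compute and the error wrapper above are all
# server-populated; client code only reads them. A small helper that walks the
# MachineLearningServiceError -> ErrorResponse -> ErrorDetail chain:
from azure.mgmt.machinelearningservices.models import ComputeResource

def print_provisioning_errors(compute_resource: ComputeResource) -> None:
    properties = compute_resource.properties
    for wrapper in (properties.provisioning_errors or []) if properties else []:
        response = wrapper.error                      # ErrorResponse, may be None
        if response is None:
            continue
        print(f"{response.code}: {response.message}")
        for detail in response.details or []:
            print(f"  - {detail.code}: {detail.message}")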
- - :param error_message: - :type error_message: str - :param status_code: - :type status_code: int - """ - - _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'status_code': {'key': 'statusCode', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(NotebookPreparationError, self).__init__(**kwargs) - self.error_message = kwargs.get('error_message', None) - self.status_code = kwargs.get('status_code', None) - - -class NotebookResourceInfo(msrest.serialization.Model): - """NotebookResourceInfo. - - :param fqdn: - :type fqdn: str - :param resource_id: the data plane resourceId that used to initialize notebook component. - :type resource_id: str - :param notebook_preparation_error: The error that occurs when preparing notebook. - :type notebook_preparation_error: - ~azure.mgmt.machinelearningservices.models.NotebookPreparationError - """ - - _attribute_map = { - 'fqdn': {'key': 'fqdn', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, - } - - def __init__( - self, - **kwargs - ): - super(NotebookResourceInfo, self).__init__(**kwargs) - self.fqdn = kwargs.get('fqdn', None) - self.resource_id = kwargs.get('resource_id', None) - self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None) - - -class Operation(msrest.serialization.Model): - """Azure Machine Learning workspace REST API operation. - - :param name: Operation name: {provider}/{resource}/{operation}. - :type name: str - :param display: Display name of operation. - :type display: ~azure.mgmt.machinelearningservices.models.OperationDisplay - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - } - - def __init__( - self, - **kwargs - ): - super(Operation, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display = kwargs.get('display', None) - - -class OperationDisplay(msrest.serialization.Model): - """Display name of operation. - - :param provider: The resource provider name: Microsoft.MachineLearningExperimentation. - :type provider: str - :param resource: The resource on which the operation is performed. - :type resource: str - :param operation: The operation that users can perform. - :type operation: str - :param description: The description for the operation. - :type description: str - """ - - _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationDisplay, self).__init__(**kwargs) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - self.operation = kwargs.get('operation', None) - self.description = kwargs.get('description', None) - - -class OperationListResult(msrest.serialization.Model): - """An array of operations supported by the resource provider. - - :param value: List of AML workspace operations supported by the AML workspace resource - provider. 
- :type value: list[~azure.mgmt.machinelearningservices.models.Operation] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class PaginatedComputeResourcesList(msrest.serialization.Model): - """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. - - :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope. - :type value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] - :param next_link: A continuation link (absolute URI) to the next page of results in the list. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ComputeResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PaginatedComputeResourcesList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class PaginatedWorkspaceConnectionsList(msrest.serialization.Model): - """Paginated list of Workspace connection objects. - - :param value: An array of Workspace connection objects. - :type value: list[~azure.mgmt.machinelearningservices.models.WorkspaceConnection] - :param next_link: A continuation link (absolute URI) to the next page of results in the list. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[WorkspaceConnection]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class Password(msrest.serialization.Model): - """Password. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: - :vartype name: str - :ivar value: - :vartype value: str - """ - - _validation = { - 'name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Password, self).__init__(**kwargs) - self.name = None - self.value = None - - -class PrivateEndpoint(msrest.serialization.Model): - """The Private Endpoint resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The ARM identifier for Private Endpoint. - :vartype id: str - """ - - _validation = { - 'id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpoint, self).__init__(**kwargs) - self.id = None - - -class PrivateEndpointConnection(msrest.serialization.Model): - """The Private Endpoint Connection resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: ResourceId of the private endpoint connection. - :vartype id: str - :ivar name: Friendly name of the private endpoint connection. - :vartype name: str - :ivar type: Resource type of private endpoint connection. - :vartype type: str - :param private_endpoint: The resource of private end point. 
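# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# The Paginated* models above are plain result containers: `value` holds one page
# and `next_link` the continuation URI (the generated list operations normally
# follow the link for you). Reading a single page directly:
from azure.mgmt.machinelearningservices.models import PaginatedComputeResourcesList

def summarize_page(page: PaginatedComputeResourcesList) -> None:
    for compute in page.value or []:
        compute_type = compute.properties.compute_type if compute.properties else "<none>"
        print(f"{compute.name}: {compute_type} in {compute.location}")
    if page.next_link:
        print(f"more results at: {page.next_link}")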
- :type private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint - :param private_link_service_connection_state: A collection of information about the state of - the connection between service consumer and provider. - :type private_link_service_connection_state: - ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: The provisioning state of the private endpoint connection resource. - Possible values include: "Succeeded", "Creating", "Deleting", "Failed". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, - 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpointConnection, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.private_endpoint = kwargs.get('private_endpoint', None) - self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) - self.provisioning_state = None - - -class PrivateLinkResource(Resource): - """A private link resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar group_id: The private link resource group id. - :vartype group_id: str - :ivar required_members: The private link resource required member names. - :vartype required_members: list[str] - :param required_zone_names: The private link resource Private link DNS zone name. 
- :type required_zone_names: list[str] - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'group_id': {'readonly': True}, - 'required_members': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkResource, self).__init__(**kwargs) - self.group_id = None - self.required_members = None - self.required_zone_names = kwargs.get('required_zone_names', None) - - -class PrivateLinkResourceListResult(msrest.serialization.Model): - """A list of private link resources. - - :param value: Array of private link resources. - :type value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkResourceListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class PrivateLinkServiceConnectionState(msrest.serialization.Model): - """A collection of information about the state of the connection between service consumer and provider. - - :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", - "Timeout". - :type status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus - :param description: The reason for approval/rejection of the connection. - :type description: str - :param actions_required: A message indicating if changes on the service provider require any - updates on the consumer. - :type actions_required: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.description = kwargs.get('description', None) - self.actions_required = kwargs.get('actions_required', None) - - -class QuotaBaseProperties(msrest.serialization.Model): - """The properties for Quota update or retrieval. - - :param id: Specifies the resource ID. - :type id: str - :param type: Specifies the resource type. - :type type: str - :param limit: The maximum permitted quota of the resource. - :type limit: long - :param unit: An enum describing the unit of quota measurement. Possible values include: - "Count". 
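# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# Approving a private endpoint connection amounts to sending a
# PrivateEndpointConnection whose connection state is set client-side; the
# id/name/type/provisioning_state fields stay None because they are readonly.
from azure.mgmt.machinelearningservices.models import (
    PrivateEndpointConnection,
    PrivateLinkServiceConnectionState,
)

approved_connection = PrivateEndpointConnection(
    private_link_service_connection_state=PrivateLinkServiceConnectionState(
        status="Approved",
        description="Approved by the workspace administrator.",
    )
)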
- :type unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(QuotaBaseProperties, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.type = kwargs.get('type', None) - self.limit = kwargs.get('limit', None) - self.unit = kwargs.get('unit', None) - - -class QuotaUpdateParameters(msrest.serialization.Model): - """Quota update parameters. - - :param value: The list for update quota. - :type value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, - } - - def __init__( - self, - **kwargs - ): - super(QuotaUpdateParameters, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class RegistryListCredentialsResult(msrest.serialization.Model): - """RegistryListCredentialsResult. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar location: - :vartype location: str - :ivar username: - :vartype username: str - :param passwords: - :type passwords: list[~azure.mgmt.machinelearningservices.models.Password] - """ - - _validation = { - 'location': {'readonly': True}, - 'username': {'readonly': True}, - } - - _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'str'}, - 'passwords': {'key': 'passwords', 'type': '[Password]'}, - } - - def __init__( - self, - **kwargs - ): - super(RegistryListCredentialsResult, self).__init__(**kwargs) - self.location = None - self.username = None - self.passwords = kwargs.get('passwords', None) - - -class ResourceId(msrest.serialization.Model): - """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. - - All required parameters must be populated in order to send to Azure. - - :param id: Required. The ID of the resource. - :type id: str - """ - - _validation = { - 'id': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceId, self).__init__(**kwargs) - self.id = kwargs['id'] - - -class ResourceName(msrest.serialization.Model): - """The Resource Name. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str - """ - - _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceName, self).__init__(**kwargs) - self.value = None - self.localized_value = None - - -class ResourceQuota(msrest.serialization.Model): - """The quota assigned to a resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar name: Name of the resource. - :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName - :ivar limit: The maximum permitted quota of the resource. 
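# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# Building the body for a quota update from the two models above. The id is a
# placeholder VM-family quota resource id; "Count" is the only documented unit.
from azure.mgmt.machinelearningservices.models import (
    QuotaBaseProperties,
    QuotaUpdateParameters,
)

quota_update = QuotaUpdateParameters(
    value=[
        QuotaBaseProperties(
            id="/subscriptions/<sub>/providers/Microsoft.MachineLearningServices/locations/eastus/quotas/<vm-family>",
            type="Microsoft.MachineLearningServices/locations/quotas",
            limit=48,
            unit="Count",
        )
    ]
)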
- :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'name': {'readonly': True}, - 'limit': {'readonly': True}, - 'unit': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'ResourceName'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceQuota, self).__init__(**kwargs) - self.id = None - self.type = None - self.name = None - self.limit = None - self.unit = None - - -class ResourceSkuLocationInfo(msrest.serialization.Model): - """ResourceSkuLocationInfo. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar location: Location of the SKU. - :vartype location: str - :ivar zones: List of availability zones where the SKU is supported. - :vartype zones: list[str] - :ivar zone_details: Details of capabilities available to a SKU in specific zones. - :vartype zone_details: list[~azure.mgmt.machinelearningservices.models.ResourceSkuZoneDetails] - """ - - _validation = { - 'location': {'readonly': True}, - 'zones': {'readonly': True}, - 'zone_details': {'readonly': True}, - } - - _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'zones': {'key': 'zones', 'type': '[str]'}, - 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceSkuLocationInfo, self).__init__(**kwargs) - self.location = None - self.zones = None - self.zone_details = None - - -class ResourceSkuZoneDetails(msrest.serialization.Model): - """Describes The zonal capabilities of a SKU. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The set of zones that the SKU is available in with the specified capabilities. - :vartype name: list[str] - :ivar capabilities: A list of capabilities that are available for the SKU in the specified list - of zones. - :vartype capabilities: list[~azure.mgmt.machinelearningservices.models.SKUCapability] - """ - - _validation = { - 'name': {'readonly': True}, - 'capabilities': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': '[str]'}, - 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceSkuZoneDetails, self).__init__(**kwargs) - self.name = None - self.capabilities = None - - -class Restriction(msrest.serialization.Model): - """The restriction because of which SKU cannot be used. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar type: The type of restrictions. As of now only possible value for this is location. - :vartype type: str - :ivar values: The value of restrictions. If the restriction type is set to location. This would - be different locations where the SKU is restricted. - :vartype values: list[str] - :param reason_code: The reason for the restriction. Possible values include: "NotSpecified", - "NotAvailableForRegion", "NotAvailableForSubscription". 
- :type reason_code: str or ~azure.mgmt.machinelearningservices.models.ReasonCode - """ - - _validation = { - 'type': {'readonly': True}, - 'values': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - 'reason_code': {'key': 'reasonCode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Restriction, self).__init__(**kwargs) - self.type = None - self.values = None - self.reason_code = kwargs.get('reason_code', None) - - -class ScaleSettings(msrest.serialization.Model): - """scale settings for AML Compute. - - All required parameters must be populated in order to send to Azure. - - :param max_node_count: Required. Max number of nodes to use. - :type max_node_count: int - :param min_node_count: Min number of nodes to use. - :type min_node_count: int - :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. - :type node_idle_time_before_scale_down: ~datetime.timedelta - """ - - _validation = { - 'max_node_count': {'required': True}, - } - - _attribute_map = { - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - super(ScaleSettings, self).__init__(**kwargs) - self.max_node_count = kwargs['max_node_count'] - self.min_node_count = kwargs.get('min_node_count', 0) - self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None) - - -class ServicePrincipalCredentials(msrest.serialization.Model): - """Service principal credentials. - - All required parameters must be populated in order to send to Azure. - - :param client_id: Required. Client Id. - :type client_id: str - :param client_secret: Required. Client secret. - :type client_secret: str - """ - - _validation = { - 'client_id': {'required': True}, - 'client_secret': {'required': True}, - } - - _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ServicePrincipalCredentials, self).__init__(**kwargs) - self.client_id = kwargs['client_id'] - self.client_secret = kwargs['client_secret'] - - -class SharedPrivateLinkResource(msrest.serialization.Model): - """SharedPrivateLinkResource. - - :param name: Unique name of the private link. - :type name: str - :param private_link_resource_id: The resource id that private link links to. - :type private_link_resource_id: str - :param group_id: The private link resource group id. - :type group_id: str - :param request_message: Request message. - :type request_message: str - :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", - "Timeout". 
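# --- Illustrative usage sketch (editorial aside, not part of the generated diff) ---
# ScaleSettings mixes a required field with a defaulted one: max_node_count must
# be supplied, min_node_count falls back to 0, and the idle timeout is serialized
# as an ISO-8601 duration.
from datetime import timedelta

from azure.mgmt.machinelearningservices.models import ScaleSettings

scale = ScaleSettings(
    max_node_count=4,
    node_idle_time_before_scale_down=timedelta(minutes=15),
)
assert scale.min_node_count == 0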
- :type status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, - 'status': {'key': 'properties.status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SharedPrivateLinkResource, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.private_link_resource_id = kwargs.get('private_link_resource_id', None) - self.group_id = kwargs.get('group_id', None) - self.request_message = kwargs.get('request_message', None) - self.status = kwargs.get('status', None) - - -class Sku(msrest.serialization.Model): - """Sku of the resource. - - :param name: Name of the sku. - :type name: str - :param tier: Tier of the sku like Basic or Enterprise. - :type tier: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Sku, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.tier = kwargs.get('tier', None) - - -class SKUCapability(msrest.serialization.Model): - """Features/user capabilities associated with the sku. - - :param name: Capability/Feature ID. - :type name: str - :param value: Details about the feature/capability. - :type value: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SKUCapability, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.value = kwargs.get('value', None) - - -class SkuListResult(msrest.serialization.Model): - """List of skus with features. - - :param value: - :type value: list[~azure.mgmt.machinelearningservices.models.WorkspaceSku] - :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this - URI to fetch the next page of Workspace Skus. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[WorkspaceSku]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SkuListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class SkuSettings(msrest.serialization.Model): - """Describes Workspace Sku details and features. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar locations: The set of locations that the SKU is available. This will be supported and - registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). - :vartype locations: list[str] - :ivar location_info: A list of locations and availability zones in those locations where the - SKU is available. - :vartype location_info: - list[~azure.mgmt.machinelearningservices.models.ResourceSkuLocationInfo] - :ivar tier: Sku Tier like Basic or Enterprise. - :vartype tier: str - :ivar resource_type: - :vartype resource_type: str - :ivar name: - :vartype name: str - :ivar capabilities: List of features/user capabilities associated with the sku. - :vartype capabilities: list[~azure.mgmt.machinelearningservices.models.SKUCapability] - :param restrictions: The restrictions because of which SKU cannot be used. 
This is empty if - there are no restrictions. - :type restrictions: list[~azure.mgmt.machinelearningservices.models.Restriction] - """ - - _validation = { - 'locations': {'readonly': True}, - 'location_info': {'readonly': True}, - 'tier': {'readonly': True}, - 'resource_type': {'readonly': True}, - 'name': {'readonly': True}, - 'capabilities': {'readonly': True}, - } - - _attribute_map = { - 'locations': {'key': 'locations', 'type': '[str]'}, - 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'}, - 'tier': {'key': 'tier', 'type': 'str'}, - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, - 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, - } - - def __init__( - self, - **kwargs - ): - super(SkuSettings, self).__init__(**kwargs) - self.locations = None - self.location_info = None - self.tier = None - self.resource_type = None - self.name = None - self.capabilities = None - self.restrictions = kwargs.get('restrictions', None) - - -class SslConfiguration(msrest.serialization.Model): - """The ssl configuration for scoring. - - :param status: Enable or disable ssl for scoring. Possible values include: "Disabled", - "Enabled". - :type status: str or ~azure.mgmt.machinelearningservices.models.SslConfigurationStatus - :param cert: Cert data. - :type cert: str - :param key: Key data. - :type key: str - :param cname: CNAME of the cert. - :type cname: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'cert': {'key': 'cert', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, - 'cname': {'key': 'cname', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SslConfiguration, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.cert = kwargs.get('cert', None) - self.key = kwargs.get('key', None) - self.cname = kwargs.get('cname', None) - - -class SystemService(msrest.serialization.Model): - """A system service running on a compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar system_service_type: The type of this system service. - :vartype system_service_type: str - :ivar public_ip_address: Public IP address. - :vartype public_ip_address: str - :ivar version: The version for this type. - :vartype version: str - """ - - _validation = { - 'system_service_type': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'version': {'readonly': True}, - } - - _attribute_map = { - 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SystemService, self).__init__(**kwargs) - self.system_service_type = None - self.public_ip_address = None - self.version = None - - -class UpdateWorkspaceQuotas(msrest.serialization.Model): - """The properties for update Quota response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :param limit: The maximum permitted quota of the resource. - :type limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". 
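# --- Illustrative sketch (editor-added, not part of the generated patch) ---
# SslConfiguration, defined above, is a plain data model; a hedged example of enabling SSL
# for scoring. The certificate, key, and CNAME values are placeholders.
from azure.mgmt.machinelearningservices import models

ssl = models.SslConfiguration(
    status="Enabled",  # accepts a str or an SslConfigurationStatus enum value
    cert="-----BEGIN CERTIFICATE-----\n...",
    key="-----BEGIN RSA PRIVATE KEY-----\n...",
    cname="scoring.contoso.example",
)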
- :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - :param status: Status of update workspace quota. Possible values include: "Undefined", - "Success", "Failure", "InvalidQuotaBelowClusterMinimum", - "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", - "OperationNotEnabledForRegion". - :type status: str or ~azure.mgmt.machinelearningservices.models.Status - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UpdateWorkspaceQuotas, self).__init__(**kwargs) - self.id = None - self.type = None - self.limit = kwargs.get('limit', None) - self.unit = None - self.status = kwargs.get('status', None) - - -class UpdateWorkspaceQuotasResult(msrest.serialization.Model): - """The result of update workspace quota. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of workspace quota update result. - :vartype value: list[~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotas] - :ivar next_link: The URI to fetch the next page of workspace quota update result. Call - ListNext() with this to fetch the next page of Workspace Quota update result. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class Usage(msrest.serialization.Model): - """Describes AML Resource Usage. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.UsageUnit - :ivar current_value: The current usage of the resource. - :vartype current_value: long - :ivar limit: The maximum permitted usage of the resource. - :vartype limit: long - :ivar name: The name of the type of usage. - :vartype name: ~azure.mgmt.machinelearningservices.models.UsageName - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, - 'current_value': {'readonly': True}, - 'limit': {'readonly': True}, - 'name': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'current_value': {'key': 'currentValue', 'type': 'long'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'UsageName'}, - } - - def __init__( - self, - **kwargs - ): - super(Usage, self).__init__(**kwargs) - self.id = None - self.type = None - self.unit = None - self.current_value = None - self.limit = None - self.name = None - - -class UsageName(msrest.serialization.Model): - """The Usage Names. 
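# --- Illustrative sketch (editor-added, not part of the generated patch) ---
# Every field of the Usage model above is read-only, so instances normally come back from
# the service rather than from the constructor. msrest's Model.deserialize can rebuild one
# from a REST payload for testing; the payload below is a placeholder.
from azure.mgmt.machinelearningservices import models

usage = models.Usage.deserialize({
    "id": "/subscriptions/<sub>/providers/Microsoft.MachineLearningServices/usages/vCPUs",
    "type": "Microsoft.MachineLearningServices/usages",
    "unit": "Count",
    "currentValue": 8,
    "limit": 24,
    "name": {"value": "standardDSv2Family", "localizedValue": "Standard DSv2 Family vCPUs"},
})
print(usage.current_value, "of", usage.limit, usage.unit)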
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str - """ - - _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UsageName, self).__init__(**kwargs) - self.value = None - self.localized_value = None - - -class UserAccountCredentials(msrest.serialization.Model): - """Settings for user account that gets created on each on the nodes of a compute. - - All required parameters must be populated in order to send to Azure. - - :param admin_user_name: Required. Name of the administrator user account which can be used to - SSH to nodes. - :type admin_user_name: str - :param admin_user_ssh_public_key: SSH public key of the administrator user account. - :type admin_user_ssh_public_key: str - :param admin_user_password: Password of the administrator user account. - :type admin_user_password: str - """ - - _validation = { - 'admin_user_name': {'required': True}, - } - - _attribute_map = { - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'}, - 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UserAccountCredentials, self).__init__(**kwargs) - self.admin_user_name = kwargs['admin_user_name'] - self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None) - self.admin_user_password = kwargs.get('admin_user_password', None) - - -class VirtualMachine(Compute): - """A Machine Learning compute based on Azure Virtual Machines. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. 
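# --- Illustrative sketch (editor-added, not part of the generated patch) ---
# UserAccountCredentials, defined above, describes the admin account created on AmlCompute
# nodes: admin_user_name is required, and either an SSH public key or a password may be
# supplied. The values here are placeholders.
from azure.mgmt.machinelearningservices import models

admin_account = models.UserAccountCredentials(
    admin_user_name="azureuser",
    admin_user_ssh_public_key="ssh-rsa AAAAB3Nza... user@example",
)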
- :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachine, self).__init__(**kwargs) - self.compute_type = 'VirtualMachine' # type: str - self.properties = kwargs.get('properties', None) - - -class VirtualMachineProperties(msrest.serialization.Model): - """VirtualMachineProperties. - - :param virtual_machine_size: Virtual Machine size. - :type virtual_machine_size: str - :param ssh_port: Port open for ssh connections. - :type ssh_port: int - :param address: Public IP address of the virtual machine. - :type address: str - :param administrator_account: Admin credentials for virtual machine. - :type administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - - _attribute_map = { - 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineProperties, self).__init__(**kwargs) - self.virtual_machine_size = kwargs.get('virtual_machine_size', None) - self.ssh_port = kwargs.get('ssh_port', None) - self.address = kwargs.get('address', None) - self.administrator_account = kwargs.get('administrator_account', None) - - -class VirtualMachineSecrets(ComputeSecrets): - """Secrets related to a Machine Learning compute based on AKS. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param administrator_account: Admin credentials for virtual machine. 
- :type administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineSecrets, self).__init__(**kwargs) - self.compute_type = 'VirtualMachine' # type: str - self.administrator_account = kwargs.get('administrator_account', None) - - -class VirtualMachineSize(msrest.serialization.Model): - """Describes the properties of a VM size. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The name of the virtual machine size. - :vartype name: str - :ivar family: The family name of the virtual machine size. - :vartype family: str - :ivar v_cp_us: The number of vCPUs supported by the virtual machine size. - :vartype v_cp_us: int - :ivar gpus: The number of gPUs supported by the virtual machine size. - :vartype gpus: int - :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size. - :vartype os_vhd_size_mb: int - :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine - size. - :vartype max_resource_volume_mb: int - :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size. - :vartype memory_gb: float - :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs. - :vartype low_priority_capable: bool - :ivar premium_io: Specifies if the virtual machine size supports premium IO. - :vartype premium_io: bool - :param estimated_vm_prices: The estimated price information for using a VM. - :type estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices - :param supported_compute_types: Specifies the compute types supported by the virtual machine - size. 
- :type supported_compute_types: list[str] - """ - - _validation = { - 'name': {'readonly': True}, - 'family': {'readonly': True}, - 'v_cp_us': {'readonly': True}, - 'gpus': {'readonly': True}, - 'os_vhd_size_mb': {'readonly': True}, - 'max_resource_volume_mb': {'readonly': True}, - 'memory_gb': {'readonly': True}, - 'low_priority_capable': {'readonly': True}, - 'premium_io': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, - 'gpus': {'key': 'gpus', 'type': 'int'}, - 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, - 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, - 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, - 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, - 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, - 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVMPrices'}, - 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineSize, self).__init__(**kwargs) - self.name = None - self.family = None - self.v_cp_us = None - self.gpus = None - self.os_vhd_size_mb = None - self.max_resource_volume_mb = None - self.memory_gb = None - self.low_priority_capable = None - self.premium_io = None - self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None) - self.supported_compute_types = kwargs.get('supported_compute_types', None) - - -class VirtualMachineSizeListResult(msrest.serialization.Model): - """The List Virtual Machine size operation response. - - :param aml_compute: The list of virtual machine sizes supported by AmlCompute. - :type aml_compute: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] - """ - - _attribute_map = { - 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineSizeListResult, self).__init__(**kwargs) - self.aml_compute = kwargs.get('aml_compute', None) - - -class VirtualMachineSshCredentials(msrest.serialization.Model): - """Admin credentials for virtual machine. - - :param username: Username of admin account. - :type username: str - :param password: Password of admin account. - :type password: str - :param public_key_data: Public key data. - :type public_key_data: str - :param private_key_data: Private key data. - :type private_key_data: str - """ - - _attribute_map = { - 'username': {'key': 'username', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'public_key_data': {'key': 'publicKeyData', 'type': 'str'}, - 'private_key_data': {'key': 'privateKeyData', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineSshCredentials, self).__init__(**kwargs) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.public_key_data = kwargs.get('public_key_data', None) - self.private_key_data = kwargs.get('private_key_data', None) - - -class Workspace(Resource): - """An object that represents a machine learning workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. 
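# --- Illustrative sketch (editor-added, not part of the generated patch) ---
# Attaching an existing VM as compute combines the VirtualMachine, VirtualMachineProperties,
# and VirtualMachineSshCredentials models defined above. The resource id, address, and
# credentials are placeholders, not values taken from this patch.
from azure.mgmt.machinelearningservices import models

vm_compute = models.VirtualMachine(
    description="Attached Data Science VM",
    resource_id=(
        "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
        "Microsoft.Compute/virtualMachines/my-vm"
    ),
    properties=models.VirtualMachineProperties(
        virtual_machine_size="STANDARD_D2_V2",
        ssh_port=22,
        address="10.0.0.4",
        administrator_account=models.VirtualMachineSshCredentials(
            username="azureuser",
            password="<placeholder>",
        ),
    ),
)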
- :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar workspace_id: The immutable id associated with this workspace. - :vartype workspace_id: str - :param description: The description of this workspace. - :type description: str - :param friendly_name: The friendly name for this workspace. This name in mutable. - :type friendly_name: str - :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format. - :vartype creation_time: ~datetime.datetime - :param key_vault: ARM id of the key vault associated with this workspace. This cannot be - changed once the workspace has been created. - :type key_vault: str - :param application_insights: ARM id of the application insights associated with this workspace. - This cannot be changed once the workspace has been created. - :type application_insights: str - :param container_registry: ARM id of the container registry associated with this workspace. - This cannot be changed once the workspace has been created. - :type container_registry: str - :param storage_account: ARM id of the storage account associated with this workspace. This - cannot be changed once the workspace has been created. - :type storage_account: str - :param discovery_url: Url for the discovery service to identify regional endpoints for machine - learning experimentation services. - :type discovery_url: str - :ivar provisioning_state: The current deployment state of workspace resource. The - provisioningState is to indicate states for resource provisioning. Possible values include: - "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param encryption: The encryption settings of Azure ML workspace. - :type encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty - :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data - collected by the service. - :type hbi_workspace: bool - :ivar service_provisioned_resource_group: The name of the managed resource group created by - workspace RP in customer subscription if the workspace is CMK workspace. - :vartype service_provisioned_resource_group: str - :ivar private_link_count: Count of private connections in the workspace. - :vartype private_link_count: int - :param image_build_compute: The compute name for image build. - :type image_build_compute: str - :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public - access when behind VNet. - :type allow_public_access_when_behind_vnet: bool - :ivar private_endpoint_connections: The list of private endpoint connections in the workspace. - :vartype private_endpoint_connections: - list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] - :param shared_private_link_resources: The list of shared private link resources in this - workspace. - :type shared_private_link_resources: - list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] - :ivar notebook_info: The notebook info of Azure ML workspace. 
- :vartype notebook_info: ~azure.mgmt.machinelearningservices.models.NotebookResourceInfo - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'workspace_id': {'readonly': True}, - 'creation_time': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'service_provisioned_resource_group': {'readonly': True}, - 'private_link_count': {'readonly': True}, - 'private_endpoint_connections': {'readonly': True}, - 'notebook_info': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, - 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, - 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, - 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, - 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'}, - 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, - 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, - 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, - 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, - 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, - 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, - 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'}, - 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, - } - - def __init__( - self, - **kwargs - ): - super(Workspace, self).__init__(**kwargs) - self.workspace_id = None - self.description = kwargs.get('description', None) - self.friendly_name = kwargs.get('friendly_name', None) - self.creation_time = None - self.key_vault = kwargs.get('key_vault', None) - self.application_insights = kwargs.get('application_insights', None) - self.container_registry = kwargs.get('container_registry', None) - self.storage_account = kwargs.get('storage_account', None) - self.discovery_url = kwargs.get('discovery_url', None) - self.provisioning_state = None - self.encryption = kwargs.get('encryption', None) - self.hbi_workspace = kwargs.get('hbi_workspace', False) - self.service_provisioned_resource_group = None - self.private_link_count = None - self.image_build_compute = kwargs.get('image_build_compute', None) - self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', False) - self.private_endpoint_connections = None - self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None) - 
self.notebook_info = None - - -class WorkspaceConnection(msrest.serialization.Model): - """Workspace connection. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: ResourceId of the workspace connection. - :vartype id: str - :ivar name: Friendly name of the workspace connection. - :vartype name: str - :ivar type: Resource type of workspace connection. - :vartype type: str - :param category: Category of the workspace connection. - :type category: str - :param target: Target of the workspace connection. - :type target: str - :param auth_type: Authorization type of the workspace connection. - :type auth_type: str - :param value: Value details of the workspace connection. - :type value: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'category': {'key': 'properties.category', 'type': 'str'}, - 'target': {'key': 'properties.target', 'type': 'str'}, - 'auth_type': {'key': 'properties.authType', 'type': 'str'}, - 'value': {'key': 'properties.value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceConnection, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.category = kwargs.get('category', None) - self.target = kwargs.get('target', None) - self.auth_type = kwargs.get('auth_type', None) - self.value = kwargs.get('value', None) - - -class WorkspaceConnectionDto(msrest.serialization.Model): - """object used for creating workspace connection. - - :param name: Friendly name of the workspace connection. - :type name: str - :param category: Category of the workspace connection. - :type category: str - :param target: Target of the workspace connection. - :type target: str - :param auth_type: Authorization type of the workspace connection. - :type auth_type: str - :param value: Value details of the workspace connection. - :type value: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'category': {'key': 'properties.category', 'type': 'str'}, - 'target': {'key': 'properties.target', 'type': 'str'}, - 'auth_type': {'key': 'properties.authType', 'type': 'str'}, - 'value': {'key': 'properties.value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceConnectionDto, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.category = kwargs.get('category', None) - self.target = kwargs.get('target', None) - self.auth_type = kwargs.get('auth_type', None) - self.value = kwargs.get('value', None) - - -class WorkspaceListResult(msrest.serialization.Model): - """The result of a request to list machine learning workspaces. - - :param value: The list of machine learning workspaces. Since this list may be incomplete, the - nextLink field should be used to request the next list of machine learning workspaces. - :type value: list[~azure.mgmt.machinelearningservices.models.Workspace] - :param next_link: The URI that can be used to request the next list of machine learning - workspaces. 
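# --- Illustrative sketch (editor-added, not part of the generated patch) ---
# The Workspace model defined above is the payload for workspace create/update calls;
# read-only fields such as workspace_id and provisioning_state are left unset and are
# populated by the service. All resource ids below are placeholders.
from azure.mgmt.machinelearningservices import models

workspace = models.Workspace(
    location="eastus",
    friendly_name="My workspace",
    description="Team workspace",
    sku=models.Sku(name="Basic", tier="Basic"),
    key_vault="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<kv>",
    storage_account="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<sa>",
    application_insights="/subscriptions/<sub>/resourceGroups/<rg>/providers/microsoft.insights/components/<ai>",
    hbi_workspace=False,
)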
- :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Workspace]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class WorkspaceSku(msrest.serialization.Model): - """AML workspace sku information. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar resource_type: - :vartype resource_type: str - :ivar skus: The list of workspace sku settings. - :vartype skus: list[~azure.mgmt.machinelearningservices.models.SkuSettings] - """ - - _validation = { - 'resource_type': {'readonly': True}, - 'skus': {'readonly': True}, - } - - _attribute_map = { - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'skus': {'key': 'skus', 'type': '[SkuSettings]'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceSku, self).__init__(**kwargs) - self.resource_type = None - self.skus = None - - -class WorkspaceUpdateParameters(msrest.serialization.Model): - """The parameters for updating a machine learning workspace. - - :param tags: A set of tags. The resource tags for the machine learning workspace. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :param description: The description of this workspace. - :type description: str - :param friendly_name: The friendly name for this workspace. - :type friendly_name: str - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceUpdateParameters, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.sku = kwargs.get('sku', None) - self.description = kwargs.get('description', None) - self.friendly_name = kwargs.get('friendly_name', None) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py index 8abd97a8cef0..d763a94e3cca 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py @@ -1,4 +1,5 @@ # coding=utf-8 +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. @@ -7,87 +8,452 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +import sys +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union -from azure.core.exceptions import HttpResponseError -import msrest.serialization +from .. import _serialization -from ._azure_machine_learning_workspaces_enums import * +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from .. 
import models as _models +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -class Compute(msrest.serialization.Model): +class WorkspaceConnectionPropertiesV2(_serialization.Model): + """WorkspaceConnectionPropertiesV2. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AccessKeyAuthTypeWorkspaceConnectionProperties, + ManagedIdentityAuthTypeWorkspaceConnectionProperties, + NoneAuthTypeWorkspaceConnectionProperties, PATAuthTypeWorkspaceConnectionProperties, + SASAuthTypeWorkspaceConnectionProperties, + ServicePrincipalAuthTypeWorkspaceConnectionProperties, + UsernamePasswordAuthTypeWorkspaceConnectionProperties + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", and + "AccessKey". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. + :vartype category: str + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. "JSON" + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "value": {"key": "value", "type": "str"}, + "value_format": {"key": "valueFormat", "type": "str"}, + } + + _subtype_map = { + "auth_type": { + "AccessKey": "AccessKeyAuthTypeWorkspaceConnectionProperties", + "ManagedIdentity": "ManagedIdentityAuthTypeWorkspaceConnectionProperties", + "None": "NoneAuthTypeWorkspaceConnectionProperties", + "PAT": "PATAuthTypeWorkspaceConnectionProperties", + "SAS": "SASAuthTypeWorkspaceConnectionProperties", + "ServicePrincipal": "ServicePrincipalAuthTypeWorkspaceConnectionProperties", + "UsernamePassword": "UsernamePasswordAuthTypeWorkspaceConnectionProperties", + } + } + + def __init__( + self, + *, + category: Optional[str] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. + :paramtype category: str + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. "JSON" + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + super().__init__(**kwargs) + self.auth_type = None # type: Optional[str] + self.category = category + self.target = target + self.value = value + self.value_format = value_format + + +class AccessKeyAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """AccessKeyAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. 
Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", and + "AccessKey". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. + :vartype category: str + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. "JSON" + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionAccessKey + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "value": {"key": "value", "type": "str"}, + "value_format": {"key": "valueFormat", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionAccessKey"}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionAccessKey"] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. + :paramtype category: str + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. "JSON" + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionAccessKey + """ + super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = "AccessKey" # type: str + self.credentials = credentials + + +class DatastoreCredentials(_serialization.Model): + """Base definition for datastore credentials. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AccountKeyDatastoreCredentials, CertificateDatastoreCredentials, KerberosKeytabCredentials, + KerberosPasswordCredentials, NoneDatastoreCredentials, SasDatastoreCredentials, + ServicePrincipalDatastoreCredentials + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". 
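# --- Illustrative sketch (editor-added, not part of the generated patch) ---
# authType is the discriminator wired up in _subtype_map above, so deserializing the
# WorkspaceConnectionPropertiesV2 base class returns the matching subclass. The payload is a
# placeholder, and Model.deserialize comes from the vendored _serialization module.
from azure.mgmt.machinelearningservices import models

props = models.WorkspaceConnectionPropertiesV2.deserialize({
    "authType": "AccessKey",
    "category": "S3",
    "target": "https://bucket.example.amazonaws.com",
})
assert isinstance(props, models.AccessKeyAuthTypeWorkspaceConnectionProperties)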
+ :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + """ + + _validation = { + "credentials_type": {"required": True}, + } + + _attribute_map = { + "credentials_type": {"key": "credentialsType", "type": "str"}, + } + + _subtype_map = { + "credentials_type": { + "AccountKey": "AccountKeyDatastoreCredentials", + "Certificate": "CertificateDatastoreCredentials", + "KerberosKeytab": "KerberosKeytabCredentials", + "KerberosPassword": "KerberosPasswordCredentials", + "None": "NoneDatastoreCredentials", + "Sas": "SasDatastoreCredentials", + "ServicePrincipal": "ServicePrincipalDatastoreCredentials", + } + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.credentials_type = None # type: Optional[str] + + +class AccountKeyDatastoreCredentials(DatastoreCredentials): + """Account key datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar secrets: [Required] Storage account secrets. Required. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets + """ + + _validation = { + "credentials_type": {"required": True}, + "secrets": {"required": True}, + } + + _attribute_map = { + "credentials_type": {"key": "credentialsType", "type": "str"}, + "secrets": {"key": "secrets", "type": "AccountKeyDatastoreSecrets"}, + } + + def __init__(self, *, secrets: "_models.AccountKeyDatastoreSecrets", **kwargs): + """ + :keyword secrets: [Required] Storage account secrets. Required. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets + """ + super().__init__(**kwargs) + self.credentials_type = "AccountKey" # type: str + self.secrets = secrets + + +class DatastoreSecrets(_serialization.Model): + """Base definition for datastore secrets. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, KerberosKeytabSecrets, + KerberosPasswordSecrets, SasDatastoreSecrets, ServicePrincipalDatastoreSecrets + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + """ + + _validation = { + "secrets_type": {"required": True}, + } + + _attribute_map = { + "secrets_type": {"key": "secretsType", "type": "str"}, + } + + _subtype_map = { + "secrets_type": { + "AccountKey": "AccountKeyDatastoreSecrets", + "Certificate": "CertificateDatastoreSecrets", + "KerberosKeytab": "KerberosKeytabSecrets", + "KerberosPassword": "KerberosPasswordSecrets", + "Sas": "SasDatastoreSecrets", + "ServicePrincipal": "ServicePrincipalDatastoreSecrets", + } + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.secrets_type = None # type: Optional[str] + + +class AccountKeyDatastoreSecrets(DatastoreSecrets): + """Datastore account key secrets. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar key: Storage account key. + :vartype key: str + """ + + _validation = { + "secrets_type": {"required": True}, + } + + _attribute_map = { + "secrets_type": {"key": "secretsType", "type": "str"}, + "key": {"key": "key", "type": "str"}, + } + + def __init__(self, *, key: Optional[str] = None, **kwargs): + """ + :keyword key: Storage account key. + :paramtype key: str + """ + super().__init__(**kwargs) + self.secrets_type = "AccountKey" # type: str + self.key = key + + +class AcrDetails(_serialization.Model): + """Details of ACR account to be used for the Registry. + + :ivar system_created_acr_account: + :vartype system_created_acr_account: + ~azure.mgmt.machinelearningservices.models.SystemCreatedAcrAccount + :ivar user_created_acr_account: + :vartype user_created_acr_account: + ~azure.mgmt.machinelearningservices.models.UserCreatedAcrAccount + """ + + _attribute_map = { + "system_created_acr_account": {"key": "systemCreatedAcrAccount", "type": "SystemCreatedAcrAccount"}, + "user_created_acr_account": {"key": "userCreatedAcrAccount", "type": "UserCreatedAcrAccount"}, + } + + def __init__( + self, + *, + system_created_acr_account: Optional["_models.SystemCreatedAcrAccount"] = None, + user_created_acr_account: Optional["_models.UserCreatedAcrAccount"] = None, + **kwargs + ): + """ + :keyword system_created_acr_account: + :paramtype system_created_acr_account: + ~azure.mgmt.machinelearningservices.models.SystemCreatedAcrAccount + :keyword user_created_acr_account: + :paramtype user_created_acr_account: + ~azure.mgmt.machinelearningservices.models.UserCreatedAcrAccount + """ + super().__init__(**kwargs) + self.system_created_acr_account = system_created_acr_account + self.user_created_acr_account = user_created_acr_account + + +class AKSSchema(_serialization.Model): + """AKSSchema. + + :ivar properties: AKS properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "AKSSchemaProperties"}, + } + + def __init__(self, *, properties: Optional["_models.AKSSchemaProperties"] = None, **kwargs): + """ + :keyword properties: AKS properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class Compute(_serialization.Model): """Machine Learning compute object. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, VirtualMachine. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, + Kubernetes, SynapseSpark, VirtualMachine Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. 
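# --- Illustrative sketch (editor-added, not part of the generated patch) ---
# Account-key datastore credentials pair the discriminated credential type defined above
# with its required secrets object; the subclass pins credentials_type to "AccountKey".
# The key value is a placeholder.
from azure.mgmt.machinelearningservices import models

credentials = models.AccountKeyDatastoreCredentials(
    secrets=models.AccountKeyDatastoreSecrets(key="<storage-account-key>"),
)
assert credentials.credentials_type == "AccountKey"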
Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } _subtype_map = { - 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HDInsight', 'VirtualMachine': 'VirtualMachine'} + "compute_type": { + "AKS": "AKS", + "AmlCompute": "AmlCompute", + "ComputeInstance": "ComputeInstance", + "DataFactory": "DataFactory", + "DataLakeAnalytics": "DataLakeAnalytics", + "Databricks": "Databricks", + "HDInsight": "HDInsight", + "Kubernetes": "Kubernetes", + "SynapseSpark": "SynapseSpark", + "VirtualMachine": "VirtualMachine", + } } - def __init__( - self, - *, - compute_location: Optional[str] = None, - description: Optional[str] = None, - resource_id: Optional[str] = None, - **kwargs - ): - super(Compute, self).__init__(**kwargs) + def __init__(self, *, description: Optional[str] = None, resource_id: Optional[str] = None, **kwargs): + """ + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. 
+ :paramtype resource_id: str + """ + super().__init__(**kwargs) self.compute_type = None # type: Optional[str] - self.compute_location = compute_location + self.compute_location = None self.provisioning_state = None self.description = description self.created_on = None @@ -95,142 +461,209 @@ def __init__( self.resource_id = resource_id self.provisioning_errors = None self.is_attached_compute = None + self.disable_local_auth = None -class AKS(Compute): +class AKS(Compute, AKSSchema): # pylint: disable=too-many-instance-attributes """A Machine Learning compute based on AKS. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str + :ivar properties: AKS properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool - :param properties: AKS properties. 
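# --- Illustrative sketch (editor-added, not part of the generated patch) ---
# computeType drives the _subtype_map declared on Compute above, so deserializing the base
# class yields the concrete compute model (AKS, AmlCompute, Kubernetes, ...). The payload
# below is a placeholder.
from azure.mgmt.machinelearningservices import models

compute = models.Compute.deserialize({
    "computeType": "AKS",
    "description": "Inference cluster",
    "resourceId": (
        "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
        "Microsoft.ContainerService/managedClusters/<aks>"
    ),
})
assert isinstance(compute, models.AKS)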
- :type properties: ~azure.mgmt.machinelearningservices.models.AKSProperties + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. + :vartype disable_local_auth: bool """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'AKSProperties'}, + "properties": {"key": "properties", "type": "AKSSchemaProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - compute_location: Optional[str] = None, + properties: Optional["_models.AKSSchemaProperties"] = None, description: Optional[str] = None, resource_id: Optional[str] = None, - properties: Optional["AKSProperties"] = None, **kwargs ): - super(AKS, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'AKS' # type: str + """ + :keyword properties: AKS properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + """ + super().__init__(description=description, resource_id=resource_id, properties=properties, **kwargs) self.properties = properties + self.compute_type = "AKS" # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = None + + +class AksComputeSecretsProperties(_serialization.Model): + """Properties of AksComputeSecrets. 
+ + :ivar user_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :vartype user_kube_config: str + :ivar admin_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :vartype admin_kube_config: str + :ivar image_pull_secret_name: Image registry pull secret. + :vartype image_pull_secret_name: str + """ + + _attribute_map = { + "user_kube_config": {"key": "userKubeConfig", "type": "str"}, + "admin_kube_config": {"key": "adminKubeConfig", "type": "str"}, + "image_pull_secret_name": {"key": "imagePullSecretName", "type": "str"}, + } + + def __init__( + self, + *, + user_kube_config: Optional[str] = None, + admin_kube_config: Optional[str] = None, + image_pull_secret_name: Optional[str] = None, + **kwargs + ): + """ + :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :paramtype user_kube_config: str + :keyword admin_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :paramtype admin_kube_config: str + :keyword image_pull_secret_name: Image registry pull secret. + :paramtype image_pull_secret_name: str + """ + super().__init__(**kwargs) + self.user_kube_config = user_kube_config + self.admin_kube_config = admin_kube_config + self.image_pull_secret_name = image_pull_secret_name -class ComputeSecrets(msrest.serialization.Model): +class ComputeSecrets(_serialization.Model): """Secrets related to a Machine Learning compute. Might differ for every type of compute. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "compute_type": {"key": "computeType", "type": "str"}, } _subtype_map = { - 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'} + "compute_type": { + "AKS": "AksComputeSecrets", + "Databricks": "DatabricksComputeSecrets", + "VirtualMachine": "VirtualMachineSecrets", + } } - def __init__( - self, - **kwargs - ): - super(ComputeSecrets, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.compute_type = None # type: Optional[str] -class AksComputeSecrets(ComputeSecrets): +class AksComputeSecrets(ComputeSecrets, AksComputeSecretsProperties): """Secrets related to a Machine Learning compute based on AKS. 
All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param user_kube_config: Content of kubeconfig file that can be used to connect to the + :ivar user_kube_config: Content of kubeconfig file that can be used to connect to the Kubernetes cluster. - :type user_kube_config: str - :param admin_kube_config: Content of kubeconfig file that can be used to connect to the + :vartype user_kube_config: str + :ivar admin_kube_config: Content of kubeconfig file that can be used to connect to the Kubernetes cluster. - :type admin_kube_config: str - :param image_pull_secret_name: Image registry pull secret. - :type image_pull_secret_name: str + :vartype admin_kube_config: str + :ivar image_pull_secret_name: Image registry pull secret. + :vartype image_pull_secret_name: str + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, - 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, - 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, + "user_kube_config": {"key": "userKubeConfig", "type": "str"}, + "admin_kube_config": {"key": "adminKubeConfig", "type": "str"}, + "image_pull_secret_name": {"key": "imagePullSecretName", "type": "str"}, + "compute_type": {"key": "computeType", "type": "str"}, } def __init__( @@ -241,40 +674,57 @@ def __init__( image_pull_secret_name: Optional[str] = None, **kwargs ): - super(AksComputeSecrets, self).__init__(**kwargs) - self.compute_type = 'AKS' # type: str + """ + :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :paramtype user_kube_config: str + :keyword admin_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :paramtype admin_kube_config: str + :keyword image_pull_secret_name: Image registry pull secret. + :paramtype image_pull_secret_name: str + """ + super().__init__( + user_kube_config=user_kube_config, + admin_kube_config=admin_kube_config, + image_pull_secret_name=image_pull_secret_name, + **kwargs + ) self.user_kube_config = user_kube_config self.admin_kube_config = admin_kube_config self.image_pull_secret_name = image_pull_secret_name + self.compute_type = "AKS" # type: str -class AksNetworkingConfiguration(msrest.serialization.Model): +class AksNetworkingConfiguration(_serialization.Model): """Advance configuration for AKS networking. - :param subnet_id: Virtual network subnet resource ID the compute nodes belong to. - :type subnet_id: str - :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must + :ivar subnet_id: Virtual network subnet resource ID the compute nodes belong to. 
+ :vartype subnet_id: str + :ivar service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must not overlap with any Subnet IP ranges. - :type service_cidr: str - :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within + :vartype service_cidr: str + :ivar dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within the Kubernetes service address range specified in serviceCidr. - :type dns_service_ip: str - :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It + :vartype dns_service_ip: str + :ivar docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It must not overlap with any Subnet IP ranges or the Kubernetes service address range. - :type docker_bridge_cidr: str + :vartype docker_bridge_cidr: str """ _validation = { - 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, - 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'}, - 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, + "service_cidr": {"pattern": r"^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$"}, + "dns_service_ip": { + "pattern": r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" + }, + "docker_bridge_cidr": {"pattern": r"^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$"}, } _attribute_map = { - 'subnet_id': {'key': 'subnetId', 'type': 'str'}, - 'service_cidr': {'key': 'serviceCidr', 'type': 'str'}, - 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'}, - 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'}, + "subnet_id": {"key": "subnetId", "type": "str"}, + "service_cidr": {"key": "serviceCidr", "type": "str"}, + "dns_service_ip": {"key": "dnsServiceIP", "type": "str"}, + "docker_bridge_cidr": {"key": "dockerBridgeCidr", "type": "str"}, } def __init__( @@ -286,45 +736,69 @@ def __init__( docker_bridge_cidr: Optional[str] = None, **kwargs ): - super(AksNetworkingConfiguration, self).__init__(**kwargs) + """ + :keyword subnet_id: Virtual network subnet resource ID the compute nodes belong to. + :paramtype subnet_id: str + :keyword service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It + must not overlap with any Subnet IP ranges. + :paramtype service_cidr: str + :keyword dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be + within the Kubernetes service address range specified in serviceCidr. + :paramtype dns_service_ip: str + :keyword docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It + must not overlap with any Subnet IP ranges or the Kubernetes service address range. + :paramtype docker_bridge_cidr: str + """ + super().__init__(**kwargs) self.subnet_id = subnet_id self.service_cidr = service_cidr self.dns_service_ip = dns_service_ip self.docker_bridge_cidr = docker_bridge_cidr -class AKSProperties(msrest.serialization.Model): +class AKSSchemaProperties(_serialization.Model): """AKS properties. Variables are only populated by the server, and will be ignored when sending a request. - :param cluster_fqdn: Cluster full qualified domain name. - :type cluster_fqdn: str + :ivar cluster_fqdn: Cluster full qualified domain name. + :vartype cluster_fqdn: str :ivar system_services: System services. 
:vartype system_services: list[~azure.mgmt.machinelearningservices.models.SystemService] - :param agent_count: Number of agents. - :type agent_count: int - :param agent_vm_size: Agent virtual machine size. - :type agent_vm_size: str - :param ssl_configuration: SSL configuration. - :type ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration - :param aks_networking_configuration: AKS networking configuration for vnet. - :type aks_networking_configuration: + :ivar agent_count: Number of agents. + :vartype agent_count: int + :ivar agent_vm_size: Agent virtual machine size. + :vartype agent_vm_size: str + :ivar cluster_purpose: Intended usage of the cluster. Known values are: "FastProd", + "DenseProd", and "DevTest". + :vartype cluster_purpose: str or ~azure.mgmt.machinelearningservices.models.ClusterPurpose + :ivar ssl_configuration: SSL configuration. + :vartype ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration + :ivar aks_networking_configuration: AKS networking configuration for vnet. + :vartype aks_networking_configuration: ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration + :ivar load_balancer_type: Load Balancer Type. Known values are: "PublicIp" and + "InternalLoadBalancer". + :vartype load_balancer_type: str or ~azure.mgmt.machinelearningservices.models.LoadBalancerType + :ivar load_balancer_subnet: Load Balancer Subnet. + :vartype load_balancer_subnet: str """ _validation = { - 'system_services': {'readonly': True}, - 'agent_count': {'minimum': 1}, + "system_services": {"readonly": True}, + "agent_count": {"minimum": 0}, } _attribute_map = { - 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, - 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, - 'agent_count': {'key': 'agentCount', 'type': 'int'}, - 'agent_vm_size': {'key': 'agentVMSize', 'type': 'str'}, - 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'}, - 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, + "cluster_fqdn": {"key": "clusterFqdn", "type": "str"}, + "system_services": {"key": "systemServices", "type": "[SystemService]"}, + "agent_count": {"key": "agentCount", "type": "int"}, + "agent_vm_size": {"key": "agentVmSize", "type": "str"}, + "cluster_purpose": {"key": "clusterPurpose", "type": "str"}, + "ssl_configuration": {"key": "sslConfiguration", "type": "SslConfiguration"}, + "aks_networking_configuration": {"key": "aksNetworkingConfiguration", "type": "AksNetworkingConfiguration"}, + "load_balancer_type": {"key": "loadBalancerType", "type": "str"}, + "load_balancer_subnet": {"key": "loadBalancerSubnet", "type": "str"}, } def __init__( @@ -333,92 +807,214 @@ def __init__( cluster_fqdn: Optional[str] = None, agent_count: Optional[int] = None, agent_vm_size: Optional[str] = None, - ssl_configuration: Optional["SslConfiguration"] = None, - aks_networking_configuration: Optional["AksNetworkingConfiguration"] = None, + cluster_purpose: Union[str, "_models.ClusterPurpose"] = "FastProd", + ssl_configuration: Optional["_models.SslConfiguration"] = None, + aks_networking_configuration: Optional["_models.AksNetworkingConfiguration"] = None, + load_balancer_type: Union[str, "_models.LoadBalancerType"] = "PublicIp", + load_balancer_subnet: Optional[str] = None, **kwargs ): - super(AKSProperties, self).__init__(**kwargs) + """ + :keyword cluster_fqdn: Cluster full qualified domain name. 
+ :paramtype cluster_fqdn: str + :keyword agent_count: Number of agents. + :paramtype agent_count: int + :keyword agent_vm_size: Agent virtual machine size. + :paramtype agent_vm_size: str + :keyword cluster_purpose: Intended usage of the cluster. Known values are: "FastProd", + "DenseProd", and "DevTest". + :paramtype cluster_purpose: str or ~azure.mgmt.machinelearningservices.models.ClusterPurpose + :keyword ssl_configuration: SSL configuration. + :paramtype ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration + :keyword aks_networking_configuration: AKS networking configuration for vnet. + :paramtype aks_networking_configuration: + ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration + :keyword load_balancer_type: Load Balancer Type. Known values are: "PublicIp" and + "InternalLoadBalancer". + :paramtype load_balancer_type: str or + ~azure.mgmt.machinelearningservices.models.LoadBalancerType + :keyword load_balancer_subnet: Load Balancer Subnet. + :paramtype load_balancer_subnet: str + """ + super().__init__(**kwargs) self.cluster_fqdn = cluster_fqdn self.system_services = None self.agent_count = agent_count self.agent_vm_size = agent_vm_size + self.cluster_purpose = cluster_purpose self.ssl_configuration = ssl_configuration self.aks_networking_configuration = aks_networking_configuration + self.load_balancer_type = load_balancer_type + self.load_balancer_subnet = load_balancer_subnet + + +class Nodes(_serialization.Model): + """Abstract Nodes definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AllNodes + + All required parameters must be populated in order to send to Azure. + + :ivar nodes_value_type: [Required] Type of the Nodes value. Required. Known values are: "All" + and "Custom". + :vartype nodes_value_type: str or ~azure.mgmt.machinelearningservices.models.NodesValueType + """ + + _validation = { + "nodes_value_type": {"required": True}, + } + + _attribute_map = { + "nodes_value_type": {"key": "nodesValueType", "type": "str"}, + } + + _subtype_map = {"nodes_value_type": {"All": "AllNodes"}} + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.nodes_value_type = None # type: Optional[str] + + +class AllNodes(Nodes): + """All nodes means the service will be running on all of the nodes of the job. + + All required parameters must be populated in order to send to Azure. + + :ivar nodes_value_type: [Required] Type of the Nodes value. Required. Known values are: "All" + and "Custom". + :vartype nodes_value_type: str or ~azure.mgmt.machinelearningservices.models.NodesValueType + """ + + _validation = { + "nodes_value_type": {"required": True}, + } + + _attribute_map = { + "nodes_value_type": {"key": "nodesValueType", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.nodes_value_type = "All" # type: str + + +class AmlComputeSchema(_serialization.Model): + """Properties(top level) of AmlCompute. + + :ivar properties: Properties of AmlCompute. + :vartype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "AmlComputeProperties"}, + } + + def __init__(self, *, properties: Optional["_models.AmlComputeProperties"] = None, **kwargs): + """ + :keyword properties: Properties of AmlCompute. 
+ :paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + """ + super().__init__(**kwargs) + self.properties = properties -class AmlCompute(Compute): +class AmlCompute(Compute, AmlComputeSchema): # pylint: disable=too-many-instance-attributes """An Azure Machine Learning compute. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str + :ivar properties: Properties of AmlCompute. + :vartype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool - :param properties: AML Compute properties. - :type properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, + "properties": {"key": "properties", "type": "AmlComputeProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - compute_location: Optional[str] = None, + properties: Optional["_models.AmlComputeProperties"] = None, description: Optional[str] = None, resource_id: Optional[str] = None, - properties: Optional["AmlComputeProperties"] = None, **kwargs ): - super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'AmlCompute' # type: str + """ + :keyword properties: Properties of AmlCompute. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + """ + super().__init__(description=description, resource_id=resource_id, properties=properties, **kwargs) self.properties = properties + self.compute_type = "AmlCompute" # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = None -class AmlComputeNodeInformation(msrest.serialization.Model): +class AmlComputeNodeInformation(_serialization.Model): """Compute node information related to a AmlCompute. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -432,36 +1028,34 @@ class AmlComputeNodeInformation(msrest.serialization.Model): :ivar port: SSH port number of the node. :vartype port: int :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable, - leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable", - "leaving", "preempted". + leaving and preempted. Known values are: "idle", "running", "preparing", "unusable", "leaving", + and "preempted". :vartype node_state: str or ~azure.mgmt.machinelearningservices.models.NodeState :ivar run_id: ID of the Experiment running on the node, if any else null. :vartype run_id: str """ _validation = { - 'node_id': {'readonly': True}, - 'private_ip_address': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'port': {'readonly': True}, - 'node_state': {'readonly': True}, - 'run_id': {'readonly': True}, + "node_id": {"readonly": True}, + "private_ip_address": {"readonly": True}, + "public_ip_address": {"readonly": True}, + "port": {"readonly": True}, + "node_state": {"readonly": True}, + "run_id": {"readonly": True}, } _attribute_map = { - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'port': {'key': 'port', 'type': 'int'}, - 'node_state': {'key': 'nodeState', 'type': 'str'}, - 'run_id': {'key': 'runId', 'type': 'str'}, + "node_id": {"key": "nodeId", "type": "str"}, + "private_ip_address": {"key": "privateIpAddress", "type": "str"}, + "public_ip_address": {"key": "publicIpAddress", "type": "str"}, + "port": {"key": "port", "type": "int"}, + "node_state": {"key": "nodeState", "type": "str"}, + "run_id": {"key": "runId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(AmlComputeNodeInformation, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.node_id = None self.private_ip_address = None self.public_ip_address = None @@ -470,124 +1064,78 @@ def __init__( self.run_id = None -class ComputeNodesInformation(msrest.serialization.Model): - """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlComputeNodesInformation. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar next_link: The continuation token. - :vartype next_link: str - """ - - _validation = { - 'compute_type': {'required': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'} - } - - def __init__( - self, - **kwargs - ): - super(ComputeNodesInformation, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - self.next_link = None - - -class AmlComputeNodesInformation(ComputeNodesInformation): - """Compute node information related to a AmlCompute. 
+class AmlComputeNodesInformation(_serialization.Model): + """Result of AmlCompute Nodes. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar next_link: The continuation token. - :vartype next_link: str :ivar nodes: The collection of returned AmlCompute nodes details. :vartype nodes: list[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] + :ivar next_link: The continuation token. + :vartype next_link: str """ _validation = { - 'compute_type': {'required': True}, - 'next_link': {'readonly': True}, - 'nodes': {'readonly': True}, + "nodes": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, + "nodes": {"key": "nodes", "type": "[AmlComputeNodeInformation]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(AmlComputeNodesInformation, self).__init__(**kwargs) - self.compute_type = 'AmlCompute' # type: str + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.nodes = None + self.next_link = None -class AmlComputeProperties(msrest.serialization.Model): +class AmlComputeProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes """AML Compute properties. Variables are only populated by the server, and will be ignored when sending a request. - :param vm_size: Virtual Machine Size. - :type vm_size: str - :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated", - "LowPriority". - :type vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority - :param scale_settings: Scale settings for AML Compute. - :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - :param user_account_credentials: Credentials for an administrator user account that will be + :ivar os_type: Compute OS Type. Known values are: "Linux" and "Windows". + :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.OsType + :ivar vm_size: Virtual Machine Size. + :vartype vm_size: str + :ivar vm_priority: Virtual Machine priority. Known values are: "Dedicated" and "LowPriority". + :vartype vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority + :ivar virtual_machine_image: Virtual Machine image for AML Compute - windows only. + :vartype virtual_machine_image: ~azure.mgmt.machinelearningservices.models.VirtualMachineImage + :ivar isolated_network: Network is isolated or not. + :vartype isolated_network: bool + :ivar scale_settings: Scale settings for AML Compute. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings + :ivar user_account_credentials: Credentials for an administrator user account that will be created on each compute node. - :type user_account_credentials: + :vartype user_account_credentials: ~azure.mgmt.machinelearningservices.models.UserAccountCredentials - :param subnet: Virtual network subnet resource ID the compute nodes belong to. 
- :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :param remote_login_port_public_access: State of the public SSH port. Possible values are: + :ivar subnet: Virtual network subnet resource ID the compute nodes belong to. + :vartype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :ivar remote_login_port_public_access: State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, else is open all public nodes. It can be default only during cluster creation time, after - creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", - "NotSpecified". Default value: "NotSpecified". - :type remote_login_port_public_access: str or + creation it will be either enabled or disabled. Known values are: "Enabled", "Disabled", and + "NotSpecified". + :vartype remote_login_port_public_access: str or ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess :ivar allocation_state: Allocation state of the compute. Possible values are: steady - Indicates that the compute is not resizing. There are no changes to the number of compute nodes in the compute in progress. A compute enters this state when it is created and when no operations are being performed on the compute to change the number of compute nodes. resizing - Indicates that the compute is resizing; that is, compute nodes are being added to or removed - from the compute. Possible values include: "Steady", "Resizing". + from the compute. Known values are: "Steady" and "Resizing". :vartype allocation_state: str or ~azure.mgmt.machinelearningservices.models.AllocationState :ivar allocation_state_transition_time: The time at which the compute entered its current allocation state. :vartype allocation_state_transition_time: ~datetime.datetime :ivar errors: Collection of errors encountered by various compute nodes during node setup. - :vartype errors: list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar current_node_count: The number of compute nodes currently assigned to the compute. :vartype current_node_count: int :ivar target_node_count: The target number of compute nodes for the compute. If the @@ -597,46 +1145,104 @@ class AmlComputeProperties(msrest.serialization.Model): :vartype target_node_count: int :ivar node_state_counts: Counts of various node states on the compute. :vartype node_state_counts: ~azure.mgmt.machinelearningservices.models.NodeStateCounts + :ivar enable_node_public_ip: Enable or disable node public IP address provisioning. Possible + values are: Possible values are: true - Indicates that the compute nodes will have public IPs + provisioned. false - Indicates that the compute nodes will have a private endpoint and no + public IPs. + :vartype enable_node_public_ip: bool + :ivar property_bag: A property bag containing additional properties. 
+ :vartype property_bag: JSON """ _validation = { - 'allocation_state': {'readonly': True}, - 'allocation_state_transition_time': {'readonly': True}, - 'errors': {'readonly': True}, - 'current_node_count': {'readonly': True}, - 'target_node_count': {'readonly': True}, - 'node_state_counts': {'readonly': True}, + "allocation_state": {"readonly": True}, + "allocation_state_transition_time": {"readonly": True}, + "errors": {"readonly": True}, + "current_node_count": {"readonly": True}, + "target_node_count": {"readonly": True}, + "node_state_counts": {"readonly": True}, } _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, - 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, - 'allocation_state': {'key': 'allocationState', 'type': 'str'}, - 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, - 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, - 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, - 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, + "os_type": {"key": "osType", "type": "str"}, + "vm_size": {"key": "vmSize", "type": "str"}, + "vm_priority": {"key": "vmPriority", "type": "str"}, + "virtual_machine_image": {"key": "virtualMachineImage", "type": "VirtualMachineImage"}, + "isolated_network": {"key": "isolatedNetwork", "type": "bool"}, + "scale_settings": {"key": "scaleSettings", "type": "ScaleSettings"}, + "user_account_credentials": {"key": "userAccountCredentials", "type": "UserAccountCredentials"}, + "subnet": {"key": "subnet", "type": "ResourceId"}, + "remote_login_port_public_access": {"key": "remoteLoginPortPublicAccess", "type": "str"}, + "allocation_state": {"key": "allocationState", "type": "str"}, + "allocation_state_transition_time": {"key": "allocationStateTransitionTime", "type": "iso-8601"}, + "errors": {"key": "errors", "type": "[ErrorResponse]"}, + "current_node_count": {"key": "currentNodeCount", "type": "int"}, + "target_node_count": {"key": "targetNodeCount", "type": "int"}, + "node_state_counts": {"key": "nodeStateCounts", "type": "NodeStateCounts"}, + "enable_node_public_ip": {"key": "enableNodePublicIp", "type": "bool"}, + "property_bag": {"key": "propertyBag", "type": "object"}, } def __init__( self, *, + os_type: Union[str, "_models.OsType"] = "Linux", vm_size: Optional[str] = None, - vm_priority: Optional[Union[str, "VmPriority"]] = None, - scale_settings: Optional["ScaleSettings"] = None, - user_account_credentials: Optional["UserAccountCredentials"] = None, - subnet: Optional["ResourceId"] = None, - remote_login_port_public_access: Optional[Union[str, "RemoteLoginPortPublicAccess"]] = "NotSpecified", + vm_priority: Optional[Union[str, "_models.VmPriority"]] = None, + virtual_machine_image: Optional["_models.VirtualMachineImage"] = None, + isolated_network: Optional[bool] = None, + scale_settings: Optional["_models.ScaleSettings"] = None, + user_account_credentials: Optional["_models.UserAccountCredentials"] = None, + subnet: Optional["_models.ResourceId"] = None, + remote_login_port_public_access: Union[str, "_models.RemoteLoginPortPublicAccess"] = 
"NotSpecified", + enable_node_public_ip: bool = True, + property_bag: Optional[JSON] = None, **kwargs ): - super(AmlComputeProperties, self).__init__(**kwargs) + """ + :keyword os_type: Compute OS Type. Known values are: "Linux" and "Windows". + :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.OsType + :keyword vm_size: Virtual Machine Size. + :paramtype vm_size: str + :keyword vm_priority: Virtual Machine priority. Known values are: "Dedicated" and + "LowPriority". + :paramtype vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority + :keyword virtual_machine_image: Virtual Machine image for AML Compute - windows only. + :paramtype virtual_machine_image: + ~azure.mgmt.machinelearningservices.models.VirtualMachineImage + :keyword isolated_network: Network is isolated or not. + :paramtype isolated_network: bool + :keyword scale_settings: Scale settings for AML Compute. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings + :keyword user_account_credentials: Credentials for an administrator user account that will be + created on each compute node. + :paramtype user_account_credentials: + ~azure.mgmt.machinelearningservices.models.UserAccountCredentials + :keyword subnet: Virtual network subnet resource ID the compute nodes belong to. + :paramtype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :keyword remote_login_port_public_access: State of the public SSH port. Possible values are: + Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - + Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - + Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, + else is open all public nodes. It can be default only during cluster creation time, after + creation it will be either enabled or disabled. Known values are: "Enabled", "Disabled", and + "NotSpecified". + :paramtype remote_login_port_public_access: str or + ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess + :keyword enable_node_public_ip: Enable or disable node public IP address provisioning. Possible + values are: Possible values are: true - Indicates that the compute nodes will have public IPs + provisioned. false - Indicates that the compute nodes will have a private endpoint and no + public IPs. + :paramtype enable_node_public_ip: bool + :keyword property_bag: A property bag containing additional properties. + :paramtype property_bag: JSON + """ + super().__init__(**kwargs) + self.os_type = os_type self.vm_size = vm_size self.vm_priority = vm_priority + self.virtual_machine_image = virtual_machine_image + self.isolated_network = isolated_network self.scale_settings = scale_settings self.user_account_credentials = user_account_credentials self.subnet = subnet @@ -647,2768 +1253,22804 @@ def __init__( self.current_node_count = None self.target_node_count = None self.node_state_counts = None + self.enable_node_public_ip = enable_node_public_ip + self.property_bag = property_bag -class AmlUserFeature(msrest.serialization.Model): - """Features enabled for a workspace. +class AmlOperation(_serialization.Model): + """Azure Machine Learning REST API operation. - :param id: Specifies the feature ID. - :type id: str - :param display_name: Specifies the feature name. - :type display_name: str - :param description: Describes the feature for user experience. 
- :type description: str + :ivar name: Operation name: {provider}/{resource}/{operation}. + :vartype name: str + :ivar display: Display name of operation. + :vartype display: ~azure.mgmt.machinelearningservices.models.AmlOperationDisplay + :ivar is_data_action: Indicates whether the operation applies to data-plane. + :vartype is_data_action: bool """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "display": {"key": "display", "type": "AmlOperationDisplay"}, + "is_data_action": {"key": "isDataAction", "type": "bool"}, } def __init__( self, *, - id: Optional[str] = None, - display_name: Optional[str] = None, - description: Optional[str] = None, + name: Optional[str] = None, + display: Optional["_models.AmlOperationDisplay"] = None, + is_data_action: Optional[bool] = None, **kwargs ): - super(AmlUserFeature, self).__init__(**kwargs) - self.id = id - self.display_name = display_name - self.description = description + """ + :keyword name: Operation name: {provider}/{resource}/{operation}. + :paramtype name: str + :keyword display: Display name of operation. + :paramtype display: ~azure.mgmt.machinelearningservices.models.AmlOperationDisplay + :keyword is_data_action: Indicates whether the operation applies to data-plane. + :paramtype is_data_action: bool + """ + super().__init__(**kwargs) + self.name = name + self.display = display + self.is_data_action = is_data_action -class ClusterUpdateParameters(msrest.serialization.Model): - """AmlCompute update parameters. +class AmlOperationDisplay(_serialization.Model): + """Display name of operation. - :param scale_settings: Desired scale settings for the amlCompute. - :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings + :ivar provider: The resource provider name: Microsoft.MachineLearningExperimentation. + :vartype provider: str + :ivar resource: The resource on which the operation is performed. + :vartype resource: str + :ivar operation: The operation that users can perform. + :vartype operation: str + :ivar description: The description for the operation. + :vartype description: str """ _attribute_map = { - 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, + "provider": {"key": "provider", "type": "str"}, + "resource": {"key": "resource", "type": "str"}, + "operation": {"key": "operation", "type": "str"}, + "description": {"key": "description", "type": "str"}, } def __init__( self, *, - scale_settings: Optional["ScaleSettings"] = None, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, **kwargs ): - super(ClusterUpdateParameters, self).__init__(**kwargs) - self.scale_settings = scale_settings + """ + :keyword provider: The resource provider name: Microsoft.MachineLearningExperimentation. + :paramtype provider: str + :keyword resource: The resource on which the operation is performed. + :paramtype resource: str + :keyword operation: The operation that users can perform. + :paramtype operation: str + :keyword description: The description for the operation. 
+ :paramtype description: str + """ + super().__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description -class ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model): - """ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties. +class AmlOperationListResult(_serialization.Model): + """An array of operations supported by the resource provider. - Variables are only populated by the server, and will be ignored when sending a request. + :ivar value: List of AML operations supported by the AML resource provider. + :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] + """ - :ivar principal_id: The principal id of user assigned identity. - :vartype principal_id: str - :ivar client_id: The client id of user assigned identity. - :vartype client_id: str + _attribute_map = { + "value": {"key": "value", "type": "[AmlOperation]"}, + } + + def __init__(self, *, value: Optional[List["_models.AmlOperation"]] = None, **kwargs): + """ + :keyword value: List of AML operations supported by the AML resource provider. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] + """ + super().__init__(**kwargs) + self.value = value + + +class IdentityConfiguration(_serialization.Model): + """Base definition for identity configuration. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmlToken, ManagedIdentity, UserIdentity + + All required parameters must be populated in order to send to Azure. + + :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known + values are: "Managed", "AMLToken", and "UserIdentity". + :vartype identity_type: str or + ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType """ _validation = { - 'principal_id': {'readonly': True}, - 'client_id': {'readonly': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs) - self.principal_id = None - self.client_id = None + _subtype_map = { + "identity_type": {"AMLToken": "AmlToken", "Managed": "ManagedIdentity", "UserIdentity": "UserIdentity"} + } + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.identity_type = None # type: Optional[str] -class ComputeInstance(Compute): - """An Azure Machine Learning compute instance. - Variables are only populated by the server, and will be ignored when sending a request. +class AmlToken(IdentityConfiguration): + """AML Token identity configuration. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. 
Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: Compute Instance properties. - :type properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known + values are: "Managed", "AMLToken", and "UserIdentity". + :vartype identity_type: str or + ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, + "identity_type": {"key": "identityType", "type": "str"}, } - def __init__( - self, - *, - compute_location: Optional[str] = None, - description: Optional[str] = None, - resource_id: Optional[str] = None, - properties: Optional["ComputeInstanceProperties"] = None, - **kwargs - ): - super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'ComputeInstance' # type: str - self.properties = properties + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.identity_type = "AMLToken" # type: str -class ComputeInstanceApplication(msrest.serialization.Model): - """Defines an Aml Instance application and its connectivity endpoint URI. +class AmlUserFeature(_serialization.Model): + """Features enabled for a workspace. - :param display_name: Name of the ComputeInstance application. - :type display_name: str - :param endpoint_uri: Application' endpoint URI. - :type endpoint_uri: str + :ivar id: Specifies the feature ID. 
+ :vartype id: str + :ivar display_name: Specifies the feature name. + :vartype display_name: str + :ivar description: Describes the feature for user experience. + :vartype description: str """ _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "description": {"key": "description", "type": "str"}, } def __init__( self, *, + id: Optional[str] = None, # pylint: disable=redefined-builtin display_name: Optional[str] = None, - endpoint_uri: Optional[str] = None, + description: Optional[str] = None, **kwargs ): - super(ComputeInstanceApplication, self).__init__(**kwargs) + """ + :keyword id: Specifies the feature ID. + :paramtype id: str + :keyword display_name: Specifies the feature name. + :paramtype display_name: str + :keyword description: Describes the feature for user experience. + :paramtype description: str + """ + super().__init__(**kwargs) + self.id = id self.display_name = display_name - self.endpoint_uri = endpoint_uri - + self.description = description -class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): - """Defines all connectivity endpoints and properties for a ComputeInstance. - Variables are only populated by the server, and will be ignored when sending a request. +class ArmResourceId(_serialization.Model): + """ARM ResourceId of a resource. - :ivar public_ip_address: Public IP Address of this ComputeInstance. - :vartype public_ip_address: str - :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in - which the compute instance is deployed). - :vartype private_ip_address: str + :ivar resource_id: Arm ResourceId is in the format + "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" + or + "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{AcrName}". + :vartype resource_id: str """ - _validation = { - 'public_ip_address': {'readonly': True}, - 'private_ip_address': {'readonly': True}, + _attribute_map = { + "resource_id": {"key": "resourceId", "type": "str"}, } + def __init__(self, *, resource_id: Optional[str] = None, **kwargs): + """ + :keyword resource_id: Arm ResourceId is in the format + "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" + or + "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{AcrName}". + :paramtype resource_id: str + """ + super().__init__(**kwargs) + self.resource_id = resource_id + + +class ResourceBase(_serialization.Model): + """ResourceBase. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :vartype tags: dict[str, str] + """ + _attribute_map = { - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, } def __init__( self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, **kwargs ): - super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) - self.public_ip_address = None - self.private_ip_address = None + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + """ + super().__init__(**kwargs) + self.description = description + self.properties = properties + self.tags = tags -class ComputeInstanceCreatedBy(msrest.serialization.Model): - """Describes information on user who created this ComputeInstance. +class AssetBase(ResourceBase): + """AssetBase. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + """ + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: bool = False, + is_archived: bool = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super().__init__(description=description, properties=properties, tags=tags, **kwargs) + self.is_anonymous = is_anonymous + self.is_archived = is_archived + + +class AssetContainer(ResourceBase): + """AssetContainer. Variables are only populated by the server, and will be ignored when sending a request. - :ivar user_name: Name of the user. - :vartype user_name: str - :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization. - :vartype user_org_id: str - :ivar user_id: Uniquely identifies the user within his/her organization. - :vartype user_id: str + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str """ _validation = { - 'user_name': {'readonly': True}, - 'user_org_id': {'readonly': True}, - 'user_id': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, } _attribute_map = { - 'user_name': {'key': 'userName', 'type': 'str'}, - 'user_org_id': {'key': 'userOrgId', 'type': 'str'}, - 'user_id': {'key': 'userId', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, } def __init__( self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: bool = False, **kwargs ): - super(ComputeInstanceCreatedBy, self).__init__(**kwargs) - self.user_name = None - self.user_org_id = None - self.user_id = None + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super().__init__(description=description, properties=properties, tags=tags, **kwargs) + self.is_archived = is_archived + self.latest_version = None + self.next_version = None + + +class AssetJobInput(_serialization.Model): + """Asset input type. + All required parameters must be populated in order to send to Azure. -class ComputeInstanceLastOperation(msrest.serialization.Model): - """The last operation on ComputeInstance. + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str + """ - :param operation_name: Name of the last operation. Possible values include: "Create", "Start", - "Stop", "Restart", "Reimage", "Delete". - :type operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName - :param operation_time: Time of the last operation. - :type operation_time: ~datetime.datetime - :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded", - "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed". - :type operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus + _validation = { + "uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + } + + def __init__(self, *, uri: str, mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, **kwargs): + """ + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". 
+ :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + """ + super().__init__(**kwargs) + self.mode = mode + self.uri = uri + + +class AssetJobOutput(_serialization.Model): + """Asset output type. + + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str """ _attribute_map = { - 'operation_name': {'key': 'operationName', 'type': 'str'}, - 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'}, - 'operation_status': {'key': 'operationStatus', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( - self, - *, - operation_name: Optional[Union[str, "OperationName"]] = None, - operation_time: Optional[datetime.datetime] = None, - operation_status: Optional[Union[str, "OperationStatus"]] = None, - **kwargs + self, *, mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, **kwargs ): - super(ComputeInstanceLastOperation, self).__init__(**kwargs) - self.operation_name = operation_name - self.operation_time = operation_time - self.operation_status = operation_status + """ + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + """ + super().__init__(**kwargs) + self.mode = mode + self.uri = uri -class ComputeInstanceProperties(msrest.serialization.Model): - """Compute Instance properties. +class AssetReferenceBase(_serialization.Model): + """Base definition for asset references. - Variables are only populated by the server, and will be ignored when sending a request. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DataPathAssetReference, IdAssetReference, OutputPathAssetReference - :param vm_size: Virtual Machine Size. - :type vm_size: str - :param subnet: Virtual network subnet resource ID the compute nodes belong to. - :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :param application_sharing_policy: Policy for sharing applications on this compute instance - among users of parent workspace. If Personal, only the creator can access applications on this - compute instance. When Shared, any workspace user can access applications on this instance - depending on his/her assigned role. Possible values include: "Personal", "Shared". Default - value: "Shared". - :type application_sharing_policy: str or - ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy - :param ssh_settings: Specifies policy and settings for SSH access. - :type ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings - :ivar connectivity_endpoints: Describes all connectivity endpoints available for this - ComputeInstance. - :vartype connectivity_endpoints: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceConnectivityEndpoints - :ivar applications: Describes available applications and their endpoints on this - ComputeInstance. - :vartype applications: - list[~azure.mgmt.machinelearningservices.models.ComputeInstanceApplication] - :ivar created_by: Describes information on user who created this ComputeInstance. 
- :vartype created_by: ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy - :ivar errors: Collection of errors encountered on this ComputeInstance. - :vartype errors: list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar state: The current state of this ComputeInstance. Possible values include: "Creating", - "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed", - "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable". - :vartype state: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceState - :ivar last_operation: The last operation on ComputeInstance. - :vartype last_operation: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". + :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType """ _validation = { - 'connectivity_endpoints': {'readonly': True}, - 'applications': {'readonly': True}, - 'created_by': {'readonly': True}, - 'errors': {'readonly': True}, - 'state': {'readonly': True}, - 'last_operation': {'readonly': True}, + "reference_type": {"required": True}, } _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'}, - 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'}, - 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, - 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'}, - 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, - 'state': {'key': 'state', 'type': 'str'}, - 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'}, + "reference_type": {"key": "referenceType", "type": "str"}, } - def __init__( - self, - *, - vm_size: Optional[str] = None, - subnet: Optional["ResourceId"] = None, - application_sharing_policy: Optional[Union[str, "ApplicationSharingPolicy"]] = "Shared", - ssh_settings: Optional["ComputeInstanceSshSettings"] = None, - **kwargs - ): - super(ComputeInstanceProperties, self).__init__(**kwargs) - self.vm_size = vm_size - self.subnet = subnet - self.application_sharing_policy = application_sharing_policy - self.ssh_settings = ssh_settings - self.connectivity_endpoints = None - self.applications = None - self.created_by = None - self.errors = None - self.state = None - self.last_operation = None + _subtype_map = { + "reference_type": { + "DataPath": "DataPathAssetReference", + "Id": "IdAssetReference", + "OutputPath": "OutputPathAssetReference", + } + } + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.reference_type = None # type: Optional[str] -class ComputeInstanceSshSettings(msrest.serialization.Model): - """Specifies policy and settings for SSH access. - Variables are only populated by the server, and will be ignored when sending a request. +class AssignedUser(_serialization.Model): + """A user that can be assigned to a compute instance. - :param ssh_public_access: State of the public SSH port. 
Possible values are: Disabled - - Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the - public ssh port is open and accessible according to the VNet/subnet policy if applicable. - Possible values include: "Enabled", "Disabled". Default value: "Disabled". - :type ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess - :ivar admin_user_name: Describes the admin user name. - :vartype admin_user_name: str - :ivar ssh_port: Describes the port for connecting through SSH. - :vartype ssh_port: int - :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t - rsa -b 2048" to generate your SSH key pairs. - :type admin_public_key: str + All required parameters must be populated in order to send to Azure. + + :ivar object_id: User’s AAD Object Id. Required. + :vartype object_id: str + :ivar tenant_id: User’s AAD Tenant Id. Required. + :vartype tenant_id: str """ _validation = { - 'admin_user_name': {'readonly': True}, - 'ssh_port': {'readonly': True}, + "object_id": {"required": True}, + "tenant_id": {"required": True}, } _attribute_map = { - 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, + "object_id": {"key": "objectId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } - def __init__( - self, - *, - ssh_public_access: Optional[Union[str, "SshPublicAccess"]] = "Disabled", - admin_public_key: Optional[str] = None, - **kwargs - ): - super(ComputeInstanceSshSettings, self).__init__(**kwargs) - self.ssh_public_access = ssh_public_access - self.admin_user_name = None - self.ssh_port = None - self.admin_public_key = admin_public_key + def __init__(self, *, object_id: str, tenant_id: str, **kwargs): + """ + :keyword object_id: User’s AAD Object Id. Required. + :paramtype object_id: str + :keyword tenant_id: User’s AAD Tenant Id. Required. + :paramtype tenant_id: str + """ + super().__init__(**kwargs) + self.object_id = object_id + self.tenant_id = tenant_id -class Resource(msrest.serialization.Model): - """Azure Resource Manager resource envelope. +class ForecastHorizon(_serialization.Model): + """The desired maximum forecast horizon in units of time-series frequency. - Variables are only populated by the server, and will be ignored when sending a request. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoForecastHorizon, CustomForecastHorizon - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Set forecast horizon value selection mode. Required. Known values are: + "Auto" and "Custom". 
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, + "mode": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - *, - identity: Optional["Identity"] = None, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.identity = identity - self.location = location - self.type = None - self.tags = tags - self.sku = sku + _subtype_map = {"mode": {"Auto": "AutoForecastHorizon", "Custom": "CustomForecastHorizon"}} + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.mode = None # type: Optional[str] -class ComputeResource(Resource): - """Machine Learning compute object wrapped into ARM resource envelope. - Variables are only populated by the server, and will be ignored when sending a request. +class AutoForecastHorizon(ForecastHorizon): + """Forecast horizon determined automatically by system. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :param properties: Compute properties. - :type properties: ~azure.mgmt.machinelearningservices.models.Compute + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Set forecast horizon value selection mode. Required. Known values are: + "Auto" and "Custom". 
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, + "mode": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'properties': {'key': 'properties', 'type': 'Compute'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - *, - identity: Optional["Identity"] = None, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, - properties: Optional["Compute"] = None, - **kwargs - ): - super(ComputeResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs) - self.properties = properties - + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.mode = "Auto" # type: str -class Databricks(Compute): - """A DataFactory compute. - Variables are only populated by the server, and will be ignored when sending a request. +class AutologgerSettings(_serialization.Model): + """Settings for Autologger. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + :ivar mlflow_autologger: [Required] Indicates whether mlflow autologger is enabled. Required. + Known values are: "Enabled" and "Disabled". 
+ :vartype mlflow_autologger: str or + ~azure.mgmt.machinelearningservices.models.MLFlowAutologgerState """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "mlflow_autologger": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, + "mlflow_autologger": {"key": "mlflowAutologger", "type": "str"}, } - def __init__( - self, - *, - compute_location: Optional[str] = None, - description: Optional[str] = None, - resource_id: Optional[str] = None, - properties: Optional["DatabricksProperties"] = None, - **kwargs - ): - super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'Databricks' # type: str - self.properties = properties + def __init__(self, *, mlflow_autologger: Union[str, "_models.MLFlowAutologgerState"], **kwargs): + """ + :keyword mlflow_autologger: [Required] Indicates whether mlflow autologger is enabled. + Required. Known values are: "Enabled" and "Disabled". + :paramtype mlflow_autologger: str or + ~azure.mgmt.machinelearningservices.models.MLFlowAutologgerState + """ + super().__init__(**kwargs) + self.mlflow_autologger = mlflow_autologger -class DatabricksComputeSecrets(ComputeSecrets): - """Secrets related to a Machine Learning compute based on Databricks. +class JobBaseProperties(ResourceBase): # pylint: disable=too-many-instance-attributes + """Base definition for a job. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoMLJob, CommandJob, LabelingJobProperties, PipelineJob, SparkJob, SweepJob + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param databricks_access_token: access token for databricks account. - :type databricks_access_token: str + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. 
+ :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + """ + + _validation = { + "job_type": {"required": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + } + + _subtype_map = { + "job_type": { + "AutoML": "AutoMLJob", + "Command": "CommandJob", + "Labeling": "LabelingJobProperties", + "Pipeline": "PipelineJob", + "Spark": "SparkJob", + "Sweep": "SweepJob", + } + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. 
If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + """ + super().__init__(description=description, properties=properties, tags=tags, **kwargs) + self.component_id = component_id + self.compute_id = compute_id + self.display_name = display_name + self.experiment_name = experiment_name + self.identity = identity + self.is_archived = is_archived + self.job_type = None # type: Optional[str] + self.services = services + self.status = None + + +class AutoMLJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes + """AutoMLJob class. + Use this class for executing AutoML tasks like Classification/Regression etc. + See TaskType enum for all the tasks supported. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar environment_id: The ARM resource ID of the Environment specification for the job. + This is optional value to provide, if not provided, AutoML will default this to Production + AutoML curated environment version when running the job. + :vartype environment_id: str + :ivar environment_variables: Environment variables included in the job. + :vartype environment_variables: dict[str, str] + :ivar outputs: Mapping of output data bindings used in the job. 
+ :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar resources: Compute Resource configuration for the job. + :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + :ivar task_details: [Required] This represents scenario which can be one of Tables/NLP/Image. + Required. + :vartype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical + """ + + _validation = { + "job_type": {"required": True}, + "status": {"readonly": True}, + "task_details": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, + "task_details": {"key": "taskDetails", "type": "AutoMLVertical"}, + } + + def __init__( + self, + *, + task_details: "_models.AutoMLVertical", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. 
+ :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword environment_id: The ARM resource ID of the Environment specification for the job. + This is optional value to provide, if not provided, AutoML will default this to Production + AutoML curated environment version when running the job. + :paramtype environment_id: str + :keyword environment_variables: Environment variables included in the job. + :paramtype environment_variables: dict[str, str] + :keyword outputs: Mapping of output data bindings used in the job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword resources: Compute Resource configuration for the job. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + :keyword task_details: [Required] This represents scenario which can be one of + Tables/NLP/Image. Required. + :paramtype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + services=services, + **kwargs + ) + self.job_type = "AutoML" # type: str + self.environment_id = environment_id + self.environment_variables = environment_variables + self.outputs = outputs + self.resources = resources + self.task_details = task_details + + +class AutoMLVertical(_serialization.Model): + """AutoML vertical class. + Base class for AutoML verticals - TableVertical/ImageVertical/NLPVertical. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + Classification, Forecasting, ImageClassification, ImageClassificationMultilabel, + ImageInstanceSegmentation, ImageObjectDetection, Regression, TextClassification, + TextClassificationMultilabel, TextNer + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. 
+ :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + } + + _subtype_map = { + "task_type": { + "Classification": "Classification", + "Forecasting": "Forecasting", + "ImageClassification": "ImageClassification", + "ImageClassificationMultilabel": "ImageClassificationMultilabel", + "ImageInstanceSegmentation": "ImageInstanceSegmentation", + "ImageObjectDetection": "ImageObjectDetection", + "Regression": "Regression", + "TextClassification": "TextClassification", + "TextClassificationMultilabel": "TextClassificationMultilabel", + "TextNER": "TextNer", + } + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + super().__init__(**kwargs) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = None # type: Optional[str] + self.training_data = training_data + + +class NCrossValidations(_serialization.Model): + """N-Cross validations value. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoNCrossValidations, CustomNCrossValidations + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Mode for determining N-Cross validations. Required. Known values are: + "Auto" and "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode + """ + + _validation = { + "mode": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + } + + _subtype_map = {"mode": {"Auto": "AutoNCrossValidations", "Custom": "CustomNCrossValidations"}} + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.mode = None # type: Optional[str] + + +class AutoNCrossValidations(NCrossValidations): + """N-Cross validations determined automatically. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Mode for determining N-Cross validations. Required. Known values are: + "Auto" and "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode + """ + + _validation = { + "mode": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.mode = "Auto" # type: str + + +class AutoPauseProperties(_serialization.Model): + """Auto pause properties. 
+ + :ivar delay_in_minutes: + :vartype delay_in_minutes: int + :ivar enabled: + :vartype enabled: bool + """ + + _attribute_map = { + "delay_in_minutes": {"key": "delayInMinutes", "type": "int"}, + "enabled": {"key": "enabled", "type": "bool"}, + } + + def __init__(self, *, delay_in_minutes: Optional[int] = None, enabled: Optional[bool] = None, **kwargs): + """ + :keyword delay_in_minutes: + :paramtype delay_in_minutes: int + :keyword enabled: + :paramtype enabled: bool + """ + super().__init__(**kwargs) + self.delay_in_minutes = delay_in_minutes + self.enabled = enabled + + +class AutoScaleProperties(_serialization.Model): + """Auto scale properties. + + :ivar min_node_count: + :vartype min_node_count: int + :ivar enabled: + :vartype enabled: bool + :ivar max_node_count: + :vartype max_node_count: int + """ + + _attribute_map = { + "min_node_count": {"key": "minNodeCount", "type": "int"}, + "enabled": {"key": "enabled", "type": "bool"}, + "max_node_count": {"key": "maxNodeCount", "type": "int"}, + } + + def __init__( + self, + *, + min_node_count: Optional[int] = None, + enabled: Optional[bool] = None, + max_node_count: Optional[int] = None, + **kwargs + ): + """ + :keyword min_node_count: + :paramtype min_node_count: int + :keyword enabled: + :paramtype enabled: bool + :keyword max_node_count: + :paramtype max_node_count: int + """ + super().__init__(**kwargs) + self.min_node_count = min_node_count + self.enabled = enabled + self.max_node_count = max_node_count + + +class Seasonality(_serialization.Model): + """Forecasting seasonality. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoSeasonality, CustomSeasonality + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Seasonality mode. Required. Known values are: "Auto" and "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode + """ + + _validation = { + "mode": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + } + + _subtype_map = {"mode": {"Auto": "AutoSeasonality", "Custom": "CustomSeasonality"}} + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.mode = None # type: Optional[str] + + +class AutoSeasonality(Seasonality): + """AutoSeasonality. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Seasonality mode. Required. Known values are: "Auto" and "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode + """ + + _validation = { + "mode": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.mode = "Auto" # type: str + + +class TargetLags(_serialization.Model): + """The number of past periods to lag from the target column. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoTargetLags, CustomTargetLags + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Set target lags mode - Auto/Custom. Required. Known values are: "Auto" + and "Custom". 
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode
+ """
+
+ _validation = {
+ "mode": {"required": True},
+ }
+
+ _attribute_map = {
+ "mode": {"key": "mode", "type": "str"},
+ }
+
+ _subtype_map = {"mode": {"Auto": "AutoTargetLags", "Custom": "CustomTargetLags"}}
+
+ def __init__(self, **kwargs):
+ """ """
+ super().__init__(**kwargs)
+ self.mode = None # type: Optional[str]
+
+
+class AutoTargetLags(TargetLags):
+ """AutoTargetLags.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar mode: [Required] Set target lags mode - Auto/Custom. Required. Known values are: "Auto"
+ and "Custom".
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode
+ """
+
+ _validation = {
+ "mode": {"required": True},
+ }
+
+ _attribute_map = {
+ "mode": {"key": "mode", "type": "str"},
+ }
+
+ def __init__(self, **kwargs):
+ """ """
+ super().__init__(**kwargs)
+ self.mode = "Auto" # type: str
+
+
+class TargetRollingWindowSize(_serialization.Model):
+ """Forecasting target rolling window size.
+
+ You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+ AutoTargetRollingWindowSize, CustomTargetRollingWindowSize
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar mode: [Required] Target rolling window size detection mode. Required. Known values are:
+ "Auto" and "Custom".
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode
+ """
+
+ _validation = {
+ "mode": {"required": True},
+ }
+
+ _attribute_map = {
+ "mode": {"key": "mode", "type": "str"},
+ }
+
+ _subtype_map = {"mode": {"Auto": "AutoTargetRollingWindowSize", "Custom": "CustomTargetRollingWindowSize"}}
+
+ def __init__(self, **kwargs):
+ """ """
+ super().__init__(**kwargs)
+ self.mode = None # type: Optional[str]
+
+
+class AutoTargetRollingWindowSize(TargetRollingWindowSize):
+ """Target rolling window size determined automatically.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar mode: [Required] Target rolling window size detection mode. Required. Known values are:
+ "Auto" and "Custom".
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode
+ """
+
+ _validation = {
+ "mode": {"required": True},
+ }
+
+ _attribute_map = {
+ "mode": {"key": "mode", "type": "str"},
+ }
+
+ def __init__(self, **kwargs):
+ """ """
+ super().__init__(**kwargs)
+ self.mode = "Auto" # type: str
+
+
+class DatastoreProperties(ResourceBase):
+ """Base definition for datastore contents configuration.
+
+ You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+ AzureBlobDatastore, AzureDataLakeGen1Datastore, AzureDataLakeGen2Datastore, AzureFileDatastore,
+ HdfsDatastore
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar description: The asset description text.
+ :vartype description: str
+ :ivar properties: The asset property dictionary.
+ :vartype properties: dict[str, str]
+ :ivar tags: Tag dictionary. Tags can be added, removed, and updated.
+ :vartype tags: dict[str, str]
+ :ivar credentials: [Required] Account credentials. Required.
+ :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials
+ :ivar datastore_type: [Required] Storage type backing the datastore. Required.
Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", and "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + """ + + _validation = { + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "is_default": {"key": "isDefault", "type": "bool"}, + } + + _subtype_map = { + "datastore_type": { + "AzureBlob": "AzureBlobDatastore", + "AzureDataLakeGen1": "AzureDataLakeGen1Datastore", + "AzureDataLakeGen2": "AzureDataLakeGen2Datastore", + "AzureFile": "AzureFileDatastore", + "Hdfs": "HdfsDatastore", + } + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: [Required] Account credentials. Required. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + """ + super().__init__(description=description, properties=properties, tags=tags, **kwargs) + self.credentials = credentials + self.datastore_type = None # type: Optional[str] + self.is_default = None + + +class AzureDatastore(_serialization.Model): + """Base definition for Azure datastore contents configuration. + + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + """ + + _attribute_map = { + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + } + + def __init__(self, *, resource_group: Optional[str] = None, subscription_id: Optional[str] = None, **kwargs): + """ + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + """ + super().__init__(**kwargs) + self.resource_group = resource_group + self.subscription_id = subscription_id + + +class AzureBlobDatastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes + """Azure Blob datastore configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: [Required] Account credentials. Required. 
+ :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", and "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar account_name: Storage account name. + :vartype account_name: str + :ivar container_name: Storage account container name. + :vartype container_name: str + :ivar endpoint: Azure cloud endpoint for the storage account. + :vartype endpoint: str + :ivar protocol: Protocol used to communicate with the storage account. + :vartype protocol: str + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + + _validation = { + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "container_name": {"key": "containerName", "type": "str"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + account_name: Optional[str] = None, + container_name: Optional[str] = None, + endpoint: Optional[str] = None, + protocol: Optional[str] = None, + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: [Required] Account credentials. Required. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword account_name: Storage account name. 
+ :paramtype account_name: str + :keyword container_name: Storage account container name. + :paramtype container_name: str + :keyword endpoint: Azure cloud endpoint for the storage account. + :paramtype endpoint: str + :keyword protocol: Protocol used to communicate with the storage account. + :paramtype protocol: str + :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :paramtype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + **kwargs + ) + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials + self.datastore_type = "AzureBlob" # type: str + self.is_default = None + self.account_name = account_name + self.container_name = container_name + self.endpoint = endpoint + self.protocol = protocol + self.service_data_access_auth_identity = service_data_access_auth_identity + self.resource_group = resource_group + self.subscription_id = subscription_id + + +class AzureDataLakeGen1Datastore(AzureDatastore, DatastoreProperties): + """Azure Data Lake Gen1 datastore configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: [Required] Account credentials. Required. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", and "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + :ivar store_name: [Required] Azure Data Lake store name. Required. 
+ :vartype store_name: str + """ + + _validation = { + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "store_name": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, + "store_name": {"key": "storeName", "type": "str"}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + store_name: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: [Required] Account credentials. Required. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :paramtype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + :keyword store_name: [Required] Azure Data Lake store name. Required. + :paramtype store_name: str + """ + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + **kwargs + ) + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials + self.datastore_type = "AzureDataLakeGen1" # type: str + self.is_default = None + self.service_data_access_auth_identity = service_data_access_auth_identity + self.store_name = store_name + self.resource_group = resource_group + self.subscription_id = subscription_id + + +class AzureDataLakeGen2Datastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes + """Azure Data Lake Gen2 datastore configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. 
Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: [Required] Account credentials. Required. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", and "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar account_name: [Required] Storage account name. Required. + :vartype account_name: str + :ivar endpoint: Azure cloud endpoint for the storage account. + :vartype endpoint: str + :ivar filesystem: [Required] The name of the Data Lake Gen2 filesystem. Required. + :vartype filesystem: str + :ivar protocol: Protocol used to communicate with the storage account. + :vartype protocol: str + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + + _validation = { + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "account_name": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "filesystem": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "filesystem": {"key": "filesystem", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + account_name: str, + filesystem: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + endpoint: Optional[str] = None, + protocol: Optional[str] = None, + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: [Required] Account credentials. Required. 
+ :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword account_name: [Required] Storage account name. Required. + :paramtype account_name: str + :keyword endpoint: Azure cloud endpoint for the storage account. + :paramtype endpoint: str + :keyword filesystem: [Required] The name of the Data Lake Gen2 filesystem. Required. + :paramtype filesystem: str + :keyword protocol: Protocol used to communicate with the storage account. + :paramtype protocol: str + :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :paramtype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + **kwargs + ) + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials + self.datastore_type = "AzureDataLakeGen2" # type: str + self.is_default = None + self.account_name = account_name + self.endpoint = endpoint + self.filesystem = filesystem + self.protocol = protocol + self.service_data_access_auth_identity = service_data_access_auth_identity + self.resource_group = resource_group + self.subscription_id = subscription_id + + +class AzureFileDatastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes + """Azure File datastore configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: [Required] Account credentials. Required. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", and "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar account_name: [Required] Storage account name. Required. + :vartype account_name: str + :ivar endpoint: Azure cloud endpoint for the storage account. + :vartype endpoint: str + :ivar file_share_name: [Required] The name of the Azure file share that the datastore points + to. Required. + :vartype file_share_name: str + :ivar protocol: Protocol used to communicate with the storage account. 
+ :vartype protocol: str + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + + _validation = { + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "account_name": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "file_share_name": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "file_share_name": {"key": "fileShareName", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + account_name: str, + file_share_name: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + endpoint: Optional[str] = None, + protocol: Optional[str] = None, + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: [Required] Account credentials. Required. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword account_name: [Required] Storage account name. Required. + :paramtype account_name: str + :keyword endpoint: Azure cloud endpoint for the storage account. + :paramtype endpoint: str + :keyword file_share_name: [Required] The name of the Azure file share that the datastore points + to. Required. + :paramtype file_share_name: str + :keyword protocol: Protocol used to communicate with the storage account. + :paramtype protocol: str + :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". 
+ :paramtype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + **kwargs + ) + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials + self.datastore_type = "AzureFile" # type: str + self.is_default = None + self.account_name = account_name + self.endpoint = endpoint + self.file_share_name = file_share_name + self.protocol = protocol + self.service_data_access_auth_identity = service_data_access_auth_identity + self.resource_group = resource_group + self.subscription_id = subscription_id + + +class EarlyTerminationPolicy(_serialization.Model): + """Early termination policies enable canceling poor-performing runs before they complete. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BanditPolicy, MedianStoppingPolicy, TruncationSelectionPolicy + + All required parameters must be populated in order to send to Azure. + + :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. + :vartype delay_evaluation: int + :ivar evaluation_interval: Interval (number of runs) between policy evaluations. + :vartype evaluation_interval: int + :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: + "Bandit", "MedianStopping", and "TruncationSelection". + :vartype policy_type: str or + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType + """ + + _validation = { + "policy_type": {"required": True}, + } + + _attribute_map = { + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, + } + + _subtype_map = { + "policy_type": { + "Bandit": "BanditPolicy", + "MedianStopping": "MedianStoppingPolicy", + "TruncationSelection": "TruncationSelectionPolicy", + } + } + + def __init__(self, *, delay_evaluation: int = 0, evaluation_interval: int = 0, **kwargs): + """ + :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. + :paramtype delay_evaluation: int + :keyword evaluation_interval: Interval (number of runs) between policy evaluations. + :paramtype evaluation_interval: int + """ + super().__init__(**kwargs) + self.delay_evaluation = delay_evaluation + self.evaluation_interval = evaluation_interval + self.policy_type = None # type: Optional[str] + + +class BanditPolicy(EarlyTerminationPolicy): + """Defines an early termination policy based on slack criteria, and a frequency and delay interval for evaluation. + + All required parameters must be populated in order to send to Azure. + + :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. + :vartype delay_evaluation: int + :ivar evaluation_interval: Interval (number of runs) between policy evaluations. + :vartype evaluation_interval: int + :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: + "Bandit", "MedianStopping", and "TruncationSelection". + :vartype policy_type: str or + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType + :ivar slack_amount: Absolute distance allowed from the best performing run. 
+ :vartype slack_amount: float + :ivar slack_factor: Ratio of the allowed distance from the best performing run. + :vartype slack_factor: float + """ + + _validation = { + "policy_type": {"required": True}, + } + + _attribute_map = { + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, + "slack_amount": {"key": "slackAmount", "type": "float"}, + "slack_factor": {"key": "slackFactor", "type": "float"}, + } + + def __init__( + self, + *, + delay_evaluation: int = 0, + evaluation_interval: int = 0, + slack_amount: float = 0, + slack_factor: float = 0, + **kwargs + ): + """ + :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. + :paramtype delay_evaluation: int + :keyword evaluation_interval: Interval (number of runs) between policy evaluations. + :paramtype evaluation_interval: int + :keyword slack_amount: Absolute distance allowed from the best performing run. + :paramtype slack_amount: float + :keyword slack_factor: Ratio of the allowed distance from the best performing run. + :paramtype slack_factor: float + """ + super().__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type = "Bandit" # type: str + self.slack_amount = slack_amount + self.slack_factor = slack_factor + + +class Resource(_serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.system_data = None + + +class TrackedResource(Resource): + """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
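A small usage sketch of the bandit policy above, with illustrative values; the constructor pins the policy_type discriminator to "Bandit", so only the tuning knobs need to be supplied.

    from azure.mgmt.machinelearningservices import models

    # Cancel runs that trail the best run by more than a 0.2 slack factor,
    # evaluated every 2 intervals after an initial delay of 5 intervals.
    early_termination = models.BanditPolicy(
        slack_factor=0.2,
        evaluation_interval=2,
        delay_evaluation=5,
    )
    assert early_termination.policy_type == "Bandit"  # set automatically by the subclass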
+ :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + } + + def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + """ + super().__init__(**kwargs) + self.tags = tags + self.location = location + + +class BatchDeployment(TrackedResource): + """BatchDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.BatchDeploymentProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. 
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "BatchDeploymentProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.BatchDeploymentProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.BatchDeploymentProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class EndpointDeploymentPropertiesBase(_serialization.Model): + """Base definition for endpoint deployment. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. 
+ :vartype properties: dict[str, str] + """ + + _attribute_map = { + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + """ + super().__init__(**kwargs) + self.code_configuration = code_configuration + self.description = description + self.environment_id = environment_id + self.environment_variables = environment_variables + self.properties = properties + + +class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): # pylint: disable=too-many-instance-attributes + """Batch inference settings per deployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar compute: Compute target for batch inference operation. + :vartype compute: str + :ivar error_threshold: Error threshold, if the error count for the entire input goes above this + value, + the batch inference will be aborted. Range is [-1, int.MaxValue]. + For FileDataset, this value is the count of file failures. + For TabularDataset, this value is the count of record failures. + If set to -1 (the lower bound), all failures during batch inference will be ignored. + :vartype error_threshold: int + :ivar logging_level: Logging level for batch inference operation. Known values are: "Info", + "Warning", and "Debug". + :vartype logging_level: str or ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel + :ivar max_concurrency_per_instance: Indicates maximum number of parallelism per instance. + :vartype max_concurrency_per_instance: int + :ivar mini_batch_size: Size of the mini-batch passed to each batch invocation. + For FileDataset, this is the number of files per mini-batch. 
+ For TabularDataset, this is the size of the records in bytes, per mini-batch. + :vartype mini_batch_size: int + :ivar model: Reference to the model asset for the endpoint deployment. + :vartype model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :ivar output_action: Indicates how the output will be organized. Known values are: + "SummaryOnly" and "AppendRow". + :vartype output_action: str or ~azure.mgmt.machinelearningservices.models.BatchOutputAction + :ivar output_file_name: Customized output file name for append_row output action. + :vartype output_file_name: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar resources: Indicates compute configuration for the job. + If not provided, will default to the defaults defined in ResourceConfiguration. + :vartype resources: ~azure.mgmt.machinelearningservices.models.DeploymentResourceConfiguration + :ivar retry_settings: Retry Settings for the batch inference operation. + If not provided, will default to the defaults defined in BatchRetrySettings. + :vartype retry_settings: ~azure.mgmt.machinelearningservices.models.BatchRetrySettings + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "compute": {"key": "compute", "type": "str"}, + "error_threshold": {"key": "errorThreshold", "type": "int"}, + "logging_level": {"key": "loggingLevel", "type": "str"}, + "max_concurrency_per_instance": {"key": "maxConcurrencyPerInstance", "type": "int"}, + "mini_batch_size": {"key": "miniBatchSize", "type": "int"}, + "model": {"key": "model", "type": "AssetReferenceBase"}, + "output_action": {"key": "outputAction", "type": "str"}, + "output_file_name": {"key": "outputFileName", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "resources": {"key": "resources", "type": "DeploymentResourceConfiguration"}, + "retry_settings": {"key": "retrySettings", "type": "BatchRetrySettings"}, + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + compute: Optional[str] = None, + error_threshold: int = -1, + logging_level: Optional[Union[str, "_models.BatchLoggingLevel"]] = None, + max_concurrency_per_instance: int = 1, + mini_batch_size: int = 10, + model: Optional["_models.AssetReferenceBase"] = None, + output_action: Optional[Union[str, "_models.BatchOutputAction"]] = None, + output_file_name: str = "predictions.csv", + resources: Optional["_models.DeploymentResourceConfiguration"] = None, + retry_settings: Optional["_models.BatchRetrySettings"] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. 
+ :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword compute: Compute target for batch inference operation. + :paramtype compute: str + :keyword error_threshold: Error threshold, if the error count for the entire input goes above + this value, + the batch inference will be aborted. Range is [-1, int.MaxValue]. + For FileDataset, this value is the count of file failures. + For TabularDataset, this value is the count of record failures. + If set to -1 (the lower bound), all failures during batch inference will be ignored. + :paramtype error_threshold: int + :keyword logging_level: Logging level for batch inference operation. Known values are: "Info", + "Warning", and "Debug". + :paramtype logging_level: str or ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel + :keyword max_concurrency_per_instance: Indicates maximum number of parallelism per instance. + :paramtype max_concurrency_per_instance: int + :keyword mini_batch_size: Size of the mini-batch passed to each batch invocation. + For FileDataset, this is the number of files per mini-batch. + For TabularDataset, this is the size of the records in bytes, per mini-batch. + :paramtype mini_batch_size: int + :keyword model: Reference to the model asset for the endpoint deployment. + :paramtype model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :keyword output_action: Indicates how the output will be organized. Known values are: + "SummaryOnly" and "AppendRow". + :paramtype output_action: str or ~azure.mgmt.machinelearningservices.models.BatchOutputAction + :keyword output_file_name: Customized output file name for append_row output action. + :paramtype output_file_name: str + :keyword resources: Indicates compute configuration for the job. + If not provided, will default to the defaults defined in ResourceConfiguration. + :paramtype resources: + ~azure.mgmt.machinelearningservices.models.DeploymentResourceConfiguration + :keyword retry_settings: Retry Settings for the batch inference operation. + If not provided, will default to the defaults defined in BatchRetrySettings. + :paramtype retry_settings: ~azure.mgmt.machinelearningservices.models.BatchRetrySettings + """ + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + **kwargs + ) + self.compute = compute + self.error_threshold = error_threshold + self.logging_level = logging_level + self.max_concurrency_per_instance = max_concurrency_per_instance + self.mini_batch_size = mini_batch_size + self.model = model + self.output_action = output_action + self.output_file_name = output_file_name + self.provisioning_state = None + self.resources = resources + self.retry_settings = retry_settings + + +class BatchDeploymentTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of BatchDeployment entities. 
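A construction sketch combining the two models above (a BatchDeploymentProperties payload inside the BatchDeployment tracked resource); the region, compute target, and tags are placeholders.

    from azure.mgmt.machinelearningservices import models

    deployment = models.BatchDeployment(
        location="eastus",  # placeholder region
        properties=models.BatchDeploymentProperties(
            compute="cpu-cluster",            # placeholder compute target
            error_threshold=-1,               # ignore all failures during batch inference
            max_concurrency_per_instance=2,
            mini_batch_size=10,
            output_action="AppendRow",
            output_file_name="predictions.csv",
        ),
        tags={"stage": "dev"},
    )
    # provisioning_state is read-only and remains None until the service returns it.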
+ + :ivar next_link: The link to the next page of BatchDeployment objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type BatchDeployment. + :vartype value: list[~azure.mgmt.machinelearningservices.models.BatchDeployment] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[BatchDeployment]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.BatchDeployment"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of BatchDeployment objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type BatchDeployment. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchDeployment] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class BatchEndpoint(TrackedResource): + """BatchEndpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.BatchEndpointProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. 
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "BatchEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.BatchEndpointProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.BatchEndpointProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class BatchEndpointDefaults(_serialization.Model): + """Batch endpoint default values. + + :ivar deployment_name: Name of the deployment that will be default for the endpoint. + This deployment will end up getting 100% traffic when the endpoint scoring URL is invoked. + :vartype deployment_name: str + """ + + _attribute_map = { + "deployment_name": {"key": "deploymentName", "type": "str"}, + } + + def __init__(self, *, deployment_name: Optional[str] = None, **kwargs): + """ + :keyword deployment_name: Name of the deployment that will be default for the endpoint. + This deployment will end up getting 100% traffic when the endpoint scoring URL is invoked. + :paramtype deployment_name: str + """ + super().__init__(**kwargs) + self.deployment_name = deployment_name + + +class EndpointPropertiesBase(_serialization.Model): + """Inference Endpoint base definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". + :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :ivar description: Description of the inference endpoint. 
+ :vartype description: str + :ivar keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + """ + + _validation = { + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, + } + + _attribute_map = { + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": "{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, + } + + def __init__( + self, + *, + auth_mode: Union[str, "_models.EndpointAuthMode"], + description: Optional[str] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". + :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :keyword description: Description of the inference endpoint. + :paramtype description: str + :keyword keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + """ + super().__init__(**kwargs) + self.auth_mode = auth_mode + self.description = description + self.keys = keys + self.properties = properties + self.scoring_uri = None + self.swagger_uri = None + + +class BatchEndpointProperties(EndpointPropertiesBase): + """Batch endpoint configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". + :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :ivar description: Description of the inference endpoint. + :vartype description: str + :ivar keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. 
+ :vartype swagger_uri: str + :ivar defaults: Default values for Batch Endpoint. + :vartype defaults: ~azure.mgmt.machinelearningservices.models.BatchEndpointDefaults + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState + """ + + _validation = { + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": "{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, + "defaults": {"key": "defaults", "type": "BatchEndpointDefaults"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + auth_mode: Union[str, "_models.EndpointAuthMode"], + description: Optional[str] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, + properties: Optional[Dict[str, str]] = None, + defaults: Optional["_models.BatchEndpointDefaults"] = None, + **kwargs + ): + """ + :keyword auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". + :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :keyword description: Description of the inference endpoint. + :paramtype description: str + :keyword keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword defaults: Default values for Batch Endpoint. + :paramtype defaults: ~azure.mgmt.machinelearningservices.models.BatchEndpointDefaults + """ + super().__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) + self.defaults = defaults + self.provisioning_state = None + + +class BatchEndpointTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of BatchEndpoint entities. + + :ivar next_link: The link to the next page of BatchEndpoint objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type BatchEndpoint. + :vartype value: list[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[BatchEndpoint]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.BatchEndpoint"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of BatchEndpoint objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type BatchEndpoint. 
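A matching sketch for the endpoint side, pairing BatchEndpointProperties with the BatchEndpointDefaults model defined earlier; the auth mode is one of the known values listed above, and the region and default deployment name are placeholders.

    from azure.mgmt.machinelearningservices import models

    endpoint = models.BatchEndpoint(
        location="eastus",  # placeholder region
        properties=models.BatchEndpointProperties(
            auth_mode="AADToken",
            description="Nightly scoring endpoint",
            defaults=models.BatchEndpointDefaults(deployment_name="blue"),  # placeholder name
        ),
    )
    # scoring_uri, swagger_uri and provisioning_state are server-populated read-only fields.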
+ :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class BatchRetrySettings(_serialization.Model): + """Retry settings for a batch inference operation. + + :ivar max_retries: Maximum retry count for a mini-batch. + :vartype max_retries: int + :ivar timeout: Invocation timeout for a mini-batch, in ISO 8601 format. + :vartype timeout: ~datetime.timedelta + """ + + _attribute_map = { + "max_retries": {"key": "maxRetries", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + } + + def __init__(self, *, max_retries: int = 3, timeout: datetime.timedelta = "PT30S", **kwargs): + """ + :keyword max_retries: Maximum retry count for a mini-batch. + :paramtype max_retries: int + :keyword timeout: Invocation timeout for a mini-batch, in ISO 8601 format. + :paramtype timeout: ~datetime.timedelta + """ + super().__init__(**kwargs) + self.max_retries = max_retries + self.timeout = timeout + + +class SamplingAlgorithm(_serialization.Model): + """The Sampling Algorithm used to generate hyperparameter values, along with properties to + configure the algorithm. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BayesianSamplingAlgorithm, GridSamplingAlgorithm, RandomSamplingAlgorithm + + All required parameters must be populated in order to send to Azure. + + :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter + values, along with configuration properties. Required. Known values are: "Grid", "Random", and + "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + "sampling_algorithm_type": {"required": True}, + } + + _attribute_map = { + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, + } + + _subtype_map = { + "sampling_algorithm_type": { + "Bayesian": "BayesianSamplingAlgorithm", + "Grid": "GridSamplingAlgorithm", + "Random": "RandomSamplingAlgorithm", + } + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.sampling_algorithm_type = None # type: Optional[str] + + +class BayesianSamplingAlgorithm(SamplingAlgorithm): + """Defines a Sampling Algorithm that generates values based on previous values. + + All required parameters must be populated in order to send to Azure. + + :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter + values, along with configuration properties. Required. Known values are: "Grid", "Random", and + "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + "sampling_algorithm_type": {"required": True}, + } + + _attribute_map = { + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.sampling_algorithm_type = "Bayesian" # type: str + + +class BindOptions(_serialization.Model): + """BindOptions. + + :ivar propagation: Type of Bind Option. + :vartype propagation: str + :ivar create_host_path: Indicate whether to create host path. + :vartype create_host_path: bool + :ivar selinux: Mention the selinux options. 
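The _subtype_map above is what gives SamplingAlgorithm (and the other discriminated bases in this file, such as EarlyTerminationPolicy and DatastoreCredentials) polymorphic round-tripping. A short sketch, assuming the vendored _serialization.Model keeps msrest's serialize/deserialize behavior:

    from azure.mgmt.machinelearningservices import models

    # Serializing a subclass writes the discriminator into the payload ...
    payload = models.BayesianSamplingAlgorithm().serialize()
    assert payload["samplingAlgorithmType"] == "Bayesian"

    # ... and deserializing through the base class dispatches back to the subclass.
    algorithm = models.SamplingAlgorithm.deserialize(payload)
    assert isinstance(algorithm, models.BayesianSamplingAlgorithm)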
+ :vartype selinux: str + """ + + _attribute_map = { + "propagation": {"key": "propagation", "type": "str"}, + "create_host_path": {"key": "createHostPath", "type": "bool"}, + "selinux": {"key": "selinux", "type": "str"}, + } + + def __init__( + self, + *, + propagation: Optional[str] = None, + create_host_path: Optional[bool] = None, + selinux: Optional[str] = None, + **kwargs + ): + """ + :keyword propagation: Type of Bind Option. + :paramtype propagation: str + :keyword create_host_path: Indicate whether to create host path. + :paramtype create_host_path: bool + :keyword selinux: Mention the selinux options. + :paramtype selinux: str + """ + super().__init__(**kwargs) + self.propagation = propagation + self.create_host_path = create_host_path + self.selinux = selinux + + +class BuildContext(_serialization.Model): + """Configuration settings for Docker build context. + + All required parameters must be populated in order to send to Azure. + + :ivar context_uri: [Required] URI of the Docker build context used to build the image. Supports blob URIs on environment creation and may return blob or Git URIs. + Required. + :vartype context_uri: str + :ivar dockerfile_path: Path to the Dockerfile in the build context. + :vartype dockerfile_path: str + """ + + _validation = { + "context_uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "context_uri": {"key": "contextUri", "type": "str"}, + "dockerfile_path": {"key": "dockerfilePath", "type": "str"}, + } + + def __init__(self, *, context_uri: str, dockerfile_path: str = "Dockerfile", **kwargs): + """ + :keyword context_uri: [Required] URI of the Docker build context used to build the image. + Supports blob URIs on environment creation and may return blob or Git URIs. + Required. + :paramtype context_uri: str + :keyword dockerfile_path: Path to the Dockerfile in the build context. + :paramtype dockerfile_path: str + """ + super().__init__(**kwargs) + self.context_uri = context_uri + self.dockerfile_path = dockerfile_path + + +class CertificateDatastoreCredentials(DatastoreCredentials): + """Certificate datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar authority_url: Authority URL used for authentication. + :vartype authority_url: str + :ivar client_id: [Required] Service principal client ID. Required. + :vartype client_id: str + :ivar resource_url: Resource the service principal has access to. + :vartype resource_url: str + :ivar secrets: [Required] Service principal secrets. Required. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets + :ivar tenant_id: [Required] ID of the tenant to which the service principal belongs. Required. + :vartype tenant_id: str + :ivar thumbprint: [Required] Thumbprint of the certificate used for authentication. Required.
+ :vartype thumbprint: str + """ + + _validation = { + "credentials_type": {"required": True}, + "client_id": {"required": True}, + "secrets": {"required": True}, + "tenant_id": {"required": True}, + "thumbprint": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "credentials_type": {"key": "credentialsType", "type": "str"}, + "authority_url": {"key": "authorityUrl", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "resource_url": {"key": "resourceUrl", "type": "str"}, + "secrets": {"key": "secrets", "type": "CertificateDatastoreSecrets"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "thumbprint": {"key": "thumbprint", "type": "str"}, + } + + def __init__( + self, + *, + client_id: str, + secrets: "_models.CertificateDatastoreSecrets", + tenant_id: str, + thumbprint: str, + authority_url: Optional[str] = None, + resource_url: Optional[str] = None, + **kwargs + ): + """ + :keyword authority_url: Authority URL used for authentication. + :paramtype authority_url: str + :keyword client_id: [Required] Service principal client ID. Required. + :paramtype client_id: str + :keyword resource_url: Resource the service principal has access to. + :paramtype resource_url: str + :keyword secrets: [Required] Service principal secrets. Required. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets + :keyword tenant_id: [Required] ID of the tenant to which the service principal belongs. + Required. + :paramtype tenant_id: str + :keyword thumbprint: [Required] Thumbprint of the certificate used for authentication. + Required. + :paramtype thumbprint: str + """ + super().__init__(**kwargs) + self.credentials_type = "Certificate" # type: str + self.authority_url = authority_url + self.client_id = client_id + self.resource_url = resource_url + self.secrets = secrets + self.tenant_id = tenant_id + self.thumbprint = thumbprint + + +class CertificateDatastoreSecrets(DatastoreSecrets): + """Datastore certificate secrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar certificate: Service principal certificate. + :vartype certificate: str + """ + + _validation = { + "secrets_type": {"required": True}, + } + + _attribute_map = { + "secrets_type": {"key": "secretsType", "type": "str"}, + "certificate": {"key": "certificate", "type": "str"}, + } + + def __init__(self, *, certificate: Optional[str] = None, **kwargs): + """ + :keyword certificate: Service principal certificate. + :paramtype certificate: str + """ + super().__init__(**kwargs) + self.secrets_type = "Certificate" # type: str + self.certificate = certificate + + +class TableVertical(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Abstract class for AutoML tasks that use table dataset as input - such as Classification/Regression/Forecasting. + + :ivar cv_split_column_names: Columns to use for CVSplit data. + :vartype cv_split_column_names: list[str] + :ivar featurization_settings: Featurization inputs needed for AutoML job. 
+ :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset + when validation dataset is not provided. + :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :ivar test_data: Test data input. + :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype test_data_size: float + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. 
+ :vartype weight_column_name: str + """ + + _attribute_map = { + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + } + + def __init__( + self, + *, + cv_split_column_names: Optional[List[str]] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, + test_data_size: Optional[float] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + weight_column_name: Optional[str] = None, + **kwargs + ): + """ + :keyword cv_split_column_names: Columns to use for CVSplit data. + :paramtype cv_split_column_names: list[str] + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :keyword n_cross_validations: Number of cross validation folds to be applied on training + dataset + when validation dataset is not provided. + :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :keyword test_data: Test data input. + :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype test_data_size: float + :keyword validation_data: Validation data inputs. 
+ :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :paramtype weight_column_name: str + """ + super().__init__(**kwargs) + self.cv_split_column_names = cv_split_column_names + self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters + self.limit_settings = limit_settings + self.n_cross_validations = n_cross_validations + self.search_space = search_space + self.sweep_settings = sweep_settings + self.test_data = test_data + self.test_data_size = test_data_size + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.weight_column_name = weight_column_name + + +class Classification(TableVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Classification task in AutoML Table vertical. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar cv_split_column_names: Columns to use for CVSplit data. + :vartype cv_split_column_names: list[str] + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset + when validation dataset is not provided. + :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. 
+ :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :ivar test_data: Test data input. + :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype test_data_size: float + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :vartype weight_column_name: str + :ivar positive_label: Positive label for binary metrics calculation. + :vartype positive_label: str + :ivar primary_metric: Primary metric for the task. Known values are: "AUCWeighted", "Accuracy", + "NormMacroRecall", "AveragePrecisionScoreWeighted", and "PrecisionScoreWeighted". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + :ivar training_settings: Inputs for training phase for an AutoML Job. + :vartype training_settings: + ~azure.mgmt.machinelearningservices.models.ClassificationTrainingSettings + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "positive_label": {"key": "positiveLabel", "type": "str"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", "type": "ClassificationTrainingSettings"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + cv_split_column_names: Optional[List[str]] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, + limit_settings: 
Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, + test_data_size: Optional[float] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + weight_column_name: Optional[str] = None, + positive_label: Optional[str] = None, + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + training_settings: Optional["_models.ClassificationTrainingSettings"] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword cv_split_column_names: Columns to use for CVSplit data. + :paramtype cv_split_column_names: list[str] + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :keyword n_cross_validations: Number of cross validation folds to be applied on training + dataset + when validation dataset is not provided. + :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :keyword test_data: Test data input. + :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype test_data_size: float + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. 
+ :paramtype weight_column_name: str + :keyword positive_label: Positive label for binary metrics calculation. + :paramtype positive_label: str + :keyword primary_metric: Primary metric for the task. Known values are: "AUCWeighted", + "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and "PrecisionScoreWeighted". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + :keyword training_settings: Inputs for training phase for an AutoML Job. + :paramtype training_settings: + ~azure.mgmt.machinelearningservices.models.ClassificationTrainingSettings + """ + super().__init__( + cv_split_column_names=cv_split_column_names, + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + n_cross_validations=n_cross_validations, + search_space=search_space, + sweep_settings=sweep_settings, + test_data=test_data, + test_data_size=test_data_size, + validation_data=validation_data, + validation_data_size=validation_data_size, + weight_column_name=weight_column_name, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = "Classification" # type: str + self.training_data = training_data + self.positive_label = positive_label + self.primary_metric = primary_metric + self.training_settings = training_settings + self.cv_split_column_names = cv_split_column_names + self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters + self.limit_settings = limit_settings + self.n_cross_validations = n_cross_validations + self.search_space = search_space + self.sweep_settings = sweep_settings + self.test_data = test_data + self.test_data_size = test_data_size + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.weight_column_name = weight_column_name + + +class TrainingSettings(_serialization.Model): + """Training related configuration. + + :ivar enable_dnn_training: Enable recommendation of DNN models. + :vartype enable_dnn_training: bool + :ivar enable_model_explainability: Flag to turn on explainability on best model. + :vartype enable_model_explainability: bool + :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :vartype enable_onnx_compatible_models: bool + :ivar enable_stack_ensemble: Enable stack ensemble run. + :vartype enable_stack_ensemble: bool + :ivar enable_vote_ensemble: Enable voting ensemble run. + :vartype enable_vote_ensemble: bool + :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :vartype ensemble_model_download_timeout: ~datetime.timedelta + :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. 
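A sketch of a Classification task using the table-vertical fields documented above; MLTableJobInput is the referenced input type (its uri keyword and the datastore path here are assumptions), and the metric value is illustrative:

    from azure.mgmt.machinelearningservices.models import Classification, MLTableJobInput

    # Hold out 20% of the training data for validation instead of passing validation_data.
    task = Classification(
        training_data=MLTableJobInput(uri="azureml://datastores/workspaceblobstore/paths/train/"),
        target_column_name="label",
        validation_data_size=0.2,
        primary_metric="AUCWeighted",
    )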
+ :vartype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + """ + + _attribute_map = { + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + } + + def __init__( + self, + *, + enable_dnn_training: bool = False, + enable_model_explainability: bool = True, + enable_onnx_compatible_models: bool = False, + enable_stack_ensemble: bool = True, + enable_vote_ensemble: bool = True, + ensemble_model_download_timeout: datetime.timedelta = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + **kwargs + ): + """ + :keyword enable_dnn_training: Enable recommendation of DNN models. + :paramtype enable_dnn_training: bool + :keyword enable_model_explainability: Flag to turn on explainability on best model. + :paramtype enable_model_explainability: bool + :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :paramtype enable_onnx_compatible_models: bool + :keyword enable_stack_ensemble: Enable stack ensemble run. + :paramtype enable_stack_ensemble: bool + :keyword enable_vote_ensemble: Enable voting ensemble run. + :paramtype enable_vote_ensemble: bool + :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :paramtype ensemble_model_download_timeout: ~datetime.timedelta + :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :paramtype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + """ + super().__init__(**kwargs) + self.enable_dnn_training = enable_dnn_training + self.enable_model_explainability = enable_model_explainability + self.enable_onnx_compatible_models = enable_onnx_compatible_models + self.enable_stack_ensemble = enable_stack_ensemble + self.enable_vote_ensemble = enable_vote_ensemble + self.ensemble_model_download_timeout = ensemble_model_download_timeout + self.stack_ensemble_settings = stack_ensemble_settings + + +class ClassificationTrainingSettings(TrainingSettings): + """Classification Training related configuration. + + :ivar enable_dnn_training: Enable recommendation of DNN models. + :vartype enable_dnn_training: bool + :ivar enable_model_explainability: Flag to turn on explainability on best model. + :vartype enable_model_explainability: bool + :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :vartype enable_onnx_compatible_models: bool + :ivar enable_stack_ensemble: Enable stack ensemble run. + :vartype enable_stack_ensemble: bool + :ivar enable_vote_ensemble: Enable voting ensemble run. + :vartype enable_vote_ensemble: bool + :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. 
+ Configure this parameter with a higher value than 300 secs, if more time is needed. + :vartype ensemble_model_download_timeout: ~datetime.timedelta + :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :vartype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar allowed_training_algorithms: Allowed models for classification task. + :vartype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ClassificationModels] + :ivar blocked_training_algorithms: Blocked models for classification task. + :vartype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ClassificationModels] + """ + + _attribute_map = { + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, + } + + def __init__( + self, + *, + enable_dnn_training: bool = False, + enable_model_explainability: bool = True, + enable_onnx_compatible_models: bool = False, + enable_stack_ensemble: bool = True, + enable_vote_ensemble: bool = True, + ensemble_model_download_timeout: datetime.timedelta = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.ClassificationModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.ClassificationModels"]]] = None, + **kwargs + ): + """ + :keyword enable_dnn_training: Enable recommendation of DNN models. + :paramtype enable_dnn_training: bool + :keyword enable_model_explainability: Flag to turn on explainability on best model. + :paramtype enable_model_explainability: bool + :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :paramtype enable_onnx_compatible_models: bool + :keyword enable_stack_ensemble: Enable stack ensemble run. + :paramtype enable_stack_ensemble: bool + :keyword enable_vote_ensemble: Enable voting ensemble run. + :paramtype enable_vote_ensemble: bool + :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :paramtype ensemble_model_download_timeout: ~datetime.timedelta + :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :paramtype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword allowed_training_algorithms: Allowed models for classification task. + :paramtype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ClassificationModels] + :keyword blocked_training_algorithms: Blocked models for classification task. 
+ :paramtype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ClassificationModels] + """ + super().__init__( + enable_dnn_training=enable_dnn_training, + enable_model_explainability=enable_model_explainability, + enable_onnx_compatible_models=enable_onnx_compatible_models, + enable_stack_ensemble=enable_stack_ensemble, + enable_vote_ensemble=enable_vote_ensemble, + ensemble_model_download_timeout=ensemble_model_download_timeout, + stack_ensemble_settings=stack_ensemble_settings, + **kwargs + ) + self.allowed_training_algorithms = allowed_training_algorithms + self.blocked_training_algorithms = blocked_training_algorithms + + +class ClusterUpdateParameters(_serialization.Model): + """AmlCompute update parameters. + + :ivar properties: Properties of ClusterUpdate. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ScaleSettingsInformation + """ + + _attribute_map = { + "properties": {"key": "properties.properties", "type": "ScaleSettingsInformation"}, + } + + def __init__(self, *, properties: Optional["_models.ScaleSettingsInformation"] = None, **kwargs): + """ + :keyword properties: Properties of ClusterUpdate. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ScaleSettingsInformation + """ + super().__init__(**kwargs) + self.properties = properties + + +class ExportSummary(_serialization.Model): + """ExportSummary. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CsvExportSummary, CocoExportSummary, DatasetExportSummary + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + """ + + _validation = { + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + } + + _attribute_map = { + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + } + + _subtype_map = { + "format": {"CSV": "CsvExportSummary", "Coco": "CocoExportSummary", "Dataset": "DatasetExportSummary"} + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.end_date_time = None + self.exported_row_count = None + self.format = None # type: Optional[str] + self.labeling_job_id = None + self.start_date_time = None + + +class CocoExportSummary(ExportSummary): + """CocoExportSummary. + + Variables are only populated by the server, and will be ignored when sending a request. 
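A sketch of the classification training settings documented above; the algorithm names are illustrative strings passed where the allowed/blocked lists accept str or ClassificationModels values:

    import datetime

    from azure.mgmt.machinelearningservices.models import ClassificationTrainingSettings

    training_settings = ClassificationTrainingSettings(
        enable_onnx_compatible_models=True,
        ensemble_model_download_timeout=datetime.timedelta(minutes=10),  # overrides the 5-minute default
        allowed_training_algorithms=["LogisticRegression", "LightGBM"],  # illustrative model names
    )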
+ + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + :ivar container_name: The container name to which the labels will be exported. + :vartype container_name: str + :ivar snapshot_path: The output path where the labels will be exported. + :vartype snapshot_path: str + """ + + _validation = { + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "container_name": {"readonly": True}, + "snapshot_path": {"readonly": True}, + } + + _attribute_map = { + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "container_name": {"key": "containerName", "type": "str"}, + "snapshot_path": {"key": "snapshotPath", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.format = "Coco" # type: str + self.container_name = None + self.snapshot_path = None + + +class CodeConfiguration(_serialization.Model): + """Configuration for a scoring code asset. + + All required parameters must be populated in order to send to Azure. + + :ivar code_id: ARM resource ID of the code asset. + :vartype code_id: str + :ivar scoring_script: [Required] The script to execute on startup. eg. "score.py". Required. + :vartype scoring_script: str + """ + + _validation = { + "scoring_script": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "code_id": {"key": "codeId", "type": "str"}, + "scoring_script": {"key": "scoringScript", "type": "str"}, + } + + def __init__(self, *, scoring_script: str, code_id: Optional[str] = None, **kwargs): + """ + :keyword code_id: ARM resource ID of the code asset. + :paramtype code_id: str + :keyword scoring_script: [Required] The script to execute on startup. eg. "score.py". Required. + :paramtype scoring_script: str + """ + super().__init__(**kwargs) + self.code_id = code_id + self.scoring_script = scoring_script + + +class CodeContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "CodeContainerProperties"}, + } + + def __init__(self, *, properties: "_models.CodeContainerProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class CodeContainerProperties(AssetContainer): + """Container for code asset versions. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + :ivar provisioning_state: Provisioning state for the code container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + """ + + _validation = { + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: bool = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. 
+ :paramtype is_archived: bool + """ + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + self.provisioning_state = None + + +class CodeContainerResourceArmPaginatedResult(_serialization.Model): + """A paginated list of CodeContainer entities. + + :ivar next_link: The link to the next page of CodeContainer objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type CodeContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.CodeContainer] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[CodeContainer]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.CodeContainer"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of CodeContainer objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type CodeContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeContainer] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class CodeVersion(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.CodeVersionProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "CodeVersionProperties"}, + } + + def __init__(self, *, properties: "_models.CodeVersionProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeVersionProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class CodeVersionProperties(AssetBase): + """Code asset version details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. 
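A sketch of wrapping the code container properties above in their ARM envelope; the description and tags are placeholders:

    from azure.mgmt.machinelearningservices.models import CodeContainer, CodeContainerProperties

    code_container = CodeContainer(
        properties=CodeContainerProperties(
            description="Training scripts for the churn model",  # placeholder text
            tags={"team": "ml-platform"},
        )
    )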
+ :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar code_uri: Uri where code is located. + :vartype code_uri: str + :ivar provisioning_state: Provisioning state for the code version. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "code_uri": {"key": "codeUri", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: bool = False, + is_archived: bool = False, + code_uri: Optional[str] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword code_uri: Uri where code is located. + :paramtype code_uri: str + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) + self.code_uri = code_uri + self.provisioning_state = None + + +class CodeVersionResourceArmPaginatedResult(_serialization.Model): + """A paginated list of CodeVersion entities. + + :ivar next_link: The link to the next page of CodeVersion objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type CodeVersion. + :vartype value: list[~azure.mgmt.machinelearningservices.models.CodeVersion] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[CodeVersion]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.CodeVersion"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of CodeVersion objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type CodeVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeVersion] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ColumnTransformer(_serialization.Model): + """Column transformer parameters. + + :ivar fields: Fields to apply transformer logic on. + :vartype fields: list[str] + :ivar parameters: Different properties to be passed to transformer. + Input expected is dictionary of key,value pairs in JSON format. 
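A sketch of a code version pointing at uploaded content, using the envelope and properties models above; the code URI is a placeholder:

    from azure.mgmt.machinelearningservices.models import CodeVersion, CodeVersionProperties

    code_version = CodeVersion(
        properties=CodeVersionProperties(
            code_uri="https://example.blob.core.windows.net/code/src",  # placeholder location
            is_anonymous=False,
        )
    )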
+ :vartype parameters: JSON + """ + + _attribute_map = { + "fields": {"key": "fields", "type": "[str]"}, + "parameters": {"key": "parameters", "type": "object"}, + } + + def __init__(self, *, fields: Optional[List[str]] = None, parameters: Optional[JSON] = None, **kwargs): + """ + :keyword fields: Fields to apply transformer logic on. + :paramtype fields: list[str] + :keyword parameters: Different properties to be passed to transformer. + Input expected is dictionary of key,value pairs in JSON format. + :paramtype parameters: JSON + """ + super().__init__(**kwargs) + self.fields = fields + self.parameters = parameters + + +class CommandJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes + """Command job definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar autologger_settings: Distribution configuration of the job. If set, this should be one of + Mpi, Tensorflow, PyTorch, or null. + :vartype autologger_settings: ~azure.mgmt.machinelearningservices.models.AutologgerSettings + :ivar code_id: ARM resource ID of the code asset. + :vartype code_id: str + :ivar command: [Required] The command to execute on startup of the job. eg. "python train.py". + Required. + :vartype command: str + :ivar distribution: Distribution configuration of the job. If set, this should be one of Mpi, + Tensorflow, PyTorch, or null. + :vartype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :ivar environment_id: [Required] The ARM resource ID of the Environment specification for the + job. Required. 
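A sketch of the ColumnTransformer model defined above; the parameters payload is an illustrative key/value dictionary:

    from azure.mgmt.machinelearningservices.models import ColumnTransformer

    transformer = ColumnTransformer(
        fields=["purchase_date"],
        parameters={"dayfirst": True},  # illustrative transformer options
    )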
+ :vartype environment_id: str + :ivar environment_variables: Environment variables included in the job. + :vartype environment_variables: dict[str, str] + :ivar inputs: Mapping of input data bindings used in the job. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar limits: Command Job limit. + :vartype limits: ~azure.mgmt.machinelearningservices.models.CommandJobLimits + :ivar outputs: Mapping of output data bindings used in the job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar parameters: Input parameters. + :vartype parameters: JSON + :ivar resources: Compute Resource configuration for the job. + :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + """ + + _validation = { + "job_type": {"required": True}, + "status": {"readonly": True}, + "command": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "environment_id": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "parameters": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "autologger_settings": {"key": "autologgerSettings", "type": "AutologgerSettings"}, + "code_id": {"key": "codeId", "type": "str"}, + "command": {"key": "command", "type": "str"}, + "distribution": {"key": "distribution", "type": "DistributionConfiguration"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "limits": {"key": "limits", "type": "CommandJobLimits"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "parameters": {"key": "parameters", "type": "object"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, + } + + def __init__( + self, + *, + command: str, + environment_id: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + autologger_settings: Optional["_models.AutologgerSettings"] = None, + code_id: Optional[str] = None, + distribution: Optional["_models.DistributionConfiguration"] = None, + environment_variables: Optional[Dict[str, str]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + limits: Optional["_models.CommandJobLimits"] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, + **kwargs + ): + """ + :keyword description: The asset description text. 
+ :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword autologger_settings: Distribution configuration of the job. If set, this should be one + of Mpi, Tensorflow, PyTorch, or null. + :paramtype autologger_settings: ~azure.mgmt.machinelearningservices.models.AutologgerSettings + :keyword code_id: ARM resource ID of the code asset. + :paramtype code_id: str + :keyword command: [Required] The command to execute on startup of the job. eg. "python + train.py". Required. + :paramtype command: str + :keyword distribution: Distribution configuration of the job. If set, this should be one of + Mpi, Tensorflow, PyTorch, or null. + :paramtype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :keyword environment_id: [Required] The ARM resource ID of the Environment specification for + the job. Required. + :paramtype environment_id: str + :keyword environment_variables: Environment variables included in the job. + :paramtype environment_variables: dict[str, str] + :keyword inputs: Mapping of input data bindings used in the job. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword limits: Command Job limit. + :paramtype limits: ~azure.mgmt.machinelearningservices.models.CommandJobLimits + :keyword outputs: Mapping of output data bindings used in the job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword resources: Compute Resource configuration for the job. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + services=services, + **kwargs + ) + self.job_type = "Command" # type: str + self.autologger_settings = autologger_settings + self.code_id = code_id + self.command = command + self.distribution = distribution + self.environment_id = environment_id + self.environment_variables = environment_variables + self.inputs = inputs + self.limits = limits + self.outputs = outputs + self.parameters = None + self.resources = resources + + +class JobLimits(_serialization.Model): + """JobLimits. 
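A sketch of the CommandJob payload documented above; the command, environment reference, and environment variables are placeholders:

    from azure.mgmt.machinelearningservices.models import CommandJob

    job = CommandJob(
        command="python train.py --epochs 10",
        environment_id="azureml:AzureML-sklearn-1.0:1",  # placeholder environment reference
        experiment_name="churn-training",
        environment_variables={"MLFLOW_EXPERIMENT_NAME": "churn"},
    )
    # The job_type discriminator is fixed by the constructor.
    assert job.job_type == "Command"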
+ + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CommandJobLimits, SweepJobLimits + + All required parameters must be populated in order to send to Azure. + + :ivar job_limits_type: [Required] JobLimit type. Required. Known values are: "Command" and + "Sweep". + :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType + :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. + Only supports duration with precision as low as Seconds. + :vartype timeout: ~datetime.timedelta + """ + + _validation = { + "job_limits_type": {"required": True}, + } + + _attribute_map = { + "job_limits_type": {"key": "jobLimitsType", "type": "str"}, + "timeout": {"key": "timeout", "type": "duration"}, + } + + _subtype_map = {"job_limits_type": {"Command": "CommandJobLimits", "Sweep": "SweepJobLimits"}} + + def __init__(self, *, timeout: Optional[datetime.timedelta] = None, **kwargs): + """ + :keyword timeout: The max run duration in ISO 8601 format, after which the job will be + cancelled. Only supports duration with precision as low as Seconds. + :paramtype timeout: ~datetime.timedelta + """ + super().__init__(**kwargs) + self.job_limits_type = None # type: Optional[str] + self.timeout = timeout + + +class CommandJobLimits(JobLimits): + """Command Job limit class. + + All required parameters must be populated in order to send to Azure. + + :ivar job_limits_type: [Required] JobLimit type. Required. Known values are: "Command" and + "Sweep". + :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType + :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. + Only supports duration with precision as low as Seconds. + :vartype timeout: ~datetime.timedelta + """ + + _validation = { + "job_limits_type": {"required": True}, + } + + _attribute_map = { + "job_limits_type": {"key": "jobLimitsType", "type": "str"}, + "timeout": {"key": "timeout", "type": "duration"}, + } + + def __init__(self, *, timeout: Optional[datetime.timedelta] = None, **kwargs): + """ + :keyword timeout: The max run duration in ISO 8601 format, after which the job will be + cancelled. Only supports duration with precision as low as Seconds. + :paramtype timeout: ~datetime.timedelta + """ + super().__init__(timeout=timeout, **kwargs) + self.job_limits_type = "Command" # type: str + + +class ComponentContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. 
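A sketch of the polymorphic job limits above; the "Command" discriminator is set by the CommandJobLimits constructor, and the timeout value is illustrative:

    import datetime

    from azure.mgmt.machinelearningservices.models import CommandJobLimits

    limits = CommandJobLimits(timeout=datetime.timedelta(hours=2))
    assert limits.job_limits_type == "Command"  # set by the subclass constructor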
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.ComponentContainerProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ComponentContainerProperties"}, + } + + def __init__(self, *, properties: "_models.ComponentContainerProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentContainerProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ComponentContainerProperties(AssetContainer): + """Component container definition. + + + .. raw:: html + + . + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + :ivar provisioning_state: Provisioning state for the component container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + """ + + _validation = { + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: bool = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + self.provisioning_state = None + + +class ComponentContainerResourceArmPaginatedResult(_serialization.Model): + """A paginated list of ComponentContainer entities. + + :ivar next_link: The link to the next page of ComponentContainer objects. If null, there are no + additional pages. 
+ :vartype next_link: str + :ivar value: An array of objects of type ComponentContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ComponentContainer] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ComponentContainer]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.ComponentContainer"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of ComponentContainer objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ComponentContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentContainer] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ComponentVersion(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ComponentVersionProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ComponentVersionProperties"}, + } + + def __init__(self, *, properties: "_models.ComponentVersionProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentVersionProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ComponentVersionProperties(AssetBase): + """Definition of a component version: defines resources that span component types. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar component_spec: Defines Component definition details. + + + .. raw:: html + + . 
+ :vartype component_spec: JSON + :ivar provisioning_state: Provisioning state for the component version. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "component_spec": {"key": "componentSpec", "type": "object"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: bool = False, + is_archived: bool = False, + component_spec: Optional[JSON] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword component_spec: Defines Component definition details. + + + .. raw:: html + + . + :paramtype component_spec: JSON + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) + self.component_spec = component_spec + self.provisioning_state = None + + +class ComponentVersionResourceArmPaginatedResult(_serialization.Model): + """A paginated list of ComponentVersion entities. + + :ivar next_link: The link to the next page of ComponentVersion objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type ComponentVersion. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ComponentVersion] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ComponentVersion]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.ComponentVersion"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of ComponentVersion objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ComponentVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentVersion] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ComputeInstanceSchema(_serialization.Model): + """Properties(top level) of ComputeInstance. + + :ivar properties: Properties of ComputeInstance. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "ComputeInstanceProperties"}, + } + + def __init__(self, *, properties: Optional["_models.ComputeInstanceProperties"] = None, **kwargs): + """ + :keyword properties: Properties of ComputeInstance. 
+ :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ComputeInstance(Compute, ComputeInstanceSchema): # pylint: disable=too-many-instance-attributes + """An Azure Machine Learning compute instance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: Properties of ComputeInstance. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + """ + + _validation = { + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, + } + + _attribute_map = { + "properties": {"key": "properties", "type": "ComputeInstanceProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + } + + def __init__( + self, + *, + properties: Optional["_models.ComputeInstanceProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs + ): + """ + :keyword properties: Properties of ComputeInstance. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + """ + super().__init__(description=description, resource_id=resource_id, properties=properties, **kwargs) + self.properties = properties + self.compute_type = "ComputeInstance" # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = None + + +class ComputeInstanceApplication(_serialization.Model): + """Defines an Aml Instance application and its connectivity endpoint URI. + + :ivar display_name: Name of the ComputeInstance application. + :vartype display_name: str + :ivar endpoint_uri: Application' endpoint URI. + :vartype endpoint_uri: str + """ + + _attribute_map = { + "display_name": {"key": "displayName", "type": "str"}, + "endpoint_uri": {"key": "endpointUri", "type": "str"}, + } + + def __init__(self, *, display_name: Optional[str] = None, endpoint_uri: Optional[str] = None, **kwargs): + """ + :keyword display_name: Name of the ComputeInstance application. + :paramtype display_name: str + :keyword endpoint_uri: Application' endpoint URI. + :paramtype endpoint_uri: str + """ + super().__init__(**kwargs) + self.display_name = display_name + self.endpoint_uri = endpoint_uri + + +class ComputeInstanceAutologgerSettings(_serialization.Model): + """Specifies settings for autologger. + + :ivar mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. Known + values are: "Enabled" and "Disabled". 
+ :vartype mlflow_autologger: str or ~azure.mgmt.machinelearningservices.models.MlflowAutologger + """ + + _attribute_map = { + "mlflow_autologger": {"key": "mlflowAutologger", "type": "str"}, + } + + def __init__(self, *, mlflow_autologger: Optional[Union[str, "_models.MlflowAutologger"]] = None, **kwargs): + """ + :keyword mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. Known + values are: "Enabled" and "Disabled". + :paramtype mlflow_autologger: str or + ~azure.mgmt.machinelearningservices.models.MlflowAutologger + """ + super().__init__(**kwargs) + self.mlflow_autologger = mlflow_autologger + + +class ComputeInstanceConnectivityEndpoints(_serialization.Model): + """Defines all connectivity endpoints and properties for an ComputeInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar public_ip_address: Public IP Address of this ComputeInstance. + :vartype public_ip_address: str + :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in + which the compute instance is deployed). + :vartype private_ip_address: str + """ + + _validation = { + "public_ip_address": {"readonly": True}, + "private_ip_address": {"readonly": True}, + } + + _attribute_map = { + "public_ip_address": {"key": "publicIpAddress", "type": "str"}, + "private_ip_address": {"key": "privateIpAddress", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.public_ip_address = None + self.private_ip_address = None + + +class ComputeInstanceContainer(_serialization.Model): + """Defines an Aml Instance container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the ComputeInstance container. + :vartype name: str + :ivar autosave: Auto save settings. Known values are: "None", "Local", and "Remote". + :vartype autosave: str or ~azure.mgmt.machinelearningservices.models.Autosave + :ivar gpu: Information of GPU. + :vartype gpu: str + :ivar network: network of this container. Known values are: "Bridge" and "Host". + :vartype network: str or ~azure.mgmt.machinelearningservices.models.Network + :ivar environment: Environment information of this container. + :vartype environment: ~azure.mgmt.machinelearningservices.models.ComputeInstanceEnvironmentInfo + :ivar services: services of this containers. + :vartype services: list[JSON] + """ + + _validation = { + "services": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "autosave": {"key": "autosave", "type": "str"}, + "gpu": {"key": "gpu", "type": "str"}, + "network": {"key": "network", "type": "str"}, + "environment": {"key": "environment", "type": "ComputeInstanceEnvironmentInfo"}, + "services": {"key": "services", "type": "[object]"}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + autosave: Optional[Union[str, "_models.Autosave"]] = None, + gpu: Optional[str] = None, + network: Optional[Union[str, "_models.Network"]] = None, + environment: Optional["_models.ComputeInstanceEnvironmentInfo"] = None, + **kwargs + ): + """ + :keyword name: Name of the ComputeInstance container. + :paramtype name: str + :keyword autosave: Auto save settings. Known values are: "None", "Local", and "Remote". + :paramtype autosave: str or ~azure.mgmt.machinelearningservices.models.Autosave + :keyword gpu: Information of GPU. + :paramtype gpu: str + :keyword network: network of this container. 
Known values are: "Bridge" and "Host". + :paramtype network: str or ~azure.mgmt.machinelearningservices.models.Network + :keyword environment: Environment information of this container. + :paramtype environment: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceEnvironmentInfo + """ + super().__init__(**kwargs) + self.name = name + self.autosave = autosave + self.gpu = gpu + self.network = network + self.environment = environment + self.services = None + + +class ComputeInstanceCreatedBy(_serialization.Model): + """Describes information on user who created this ComputeInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar user_name: Name of the user. + :vartype user_name: str + :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization. + :vartype user_org_id: str + :ivar user_id: Uniquely identifies the user within his/her organization. + :vartype user_id: str + """ + + _validation = { + "user_name": {"readonly": True}, + "user_org_id": {"readonly": True}, + "user_id": {"readonly": True}, + } + + _attribute_map = { + "user_name": {"key": "userName", "type": "str"}, + "user_org_id": {"key": "userOrgId", "type": "str"}, + "user_id": {"key": "userId", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.user_name = None + self.user_org_id = None + self.user_id = None + + +class ComputeInstanceDataDisk(_serialization.Model): + """Defines an Aml Instance DataDisk. + + :ivar caching: Caching type of Data Disk. Known values are: "None", "ReadOnly", and + "ReadWrite". + :vartype caching: str or ~azure.mgmt.machinelearningservices.models.Caching + :ivar disk_size_gb: The initial disk size in gigabytes. + :vartype disk_size_gb: int + :ivar lun: The lun is used to uniquely identify each data disk. If attaching multiple disks, + each should have a distinct lun. + :vartype lun: int + :ivar storage_account_type: type of this storage account. Known values are: "Standard_LRS" and + "Premium_LRS". + :vartype storage_account_type: str or + ~azure.mgmt.machinelearningservices.models.StorageAccountType + """ + + _attribute_map = { + "caching": {"key": "caching", "type": "str"}, + "disk_size_gb": {"key": "diskSizeGB", "type": "int"}, + "lun": {"key": "lun", "type": "int"}, + "storage_account_type": {"key": "storageAccountType", "type": "str"}, + } + + def __init__( + self, + *, + caching: Optional[Union[str, "_models.Caching"]] = None, + disk_size_gb: Optional[int] = None, + lun: Optional[int] = None, + storage_account_type: Union[str, "_models.StorageAccountType"] = "Standard_LRS", + **kwargs + ): + """ + :keyword caching: Caching type of Data Disk. Known values are: "None", "ReadOnly", and + "ReadWrite". + :paramtype caching: str or ~azure.mgmt.machinelearningservices.models.Caching + :keyword disk_size_gb: The initial disk size in gigabytes. + :paramtype disk_size_gb: int + :keyword lun: The lun is used to uniquely identify each data disk. If attaching multiple disks, + each should have a distinct lun. + :paramtype lun: int + :keyword storage_account_type: type of this storage account. Known values are: "Standard_LRS" + and "Premium_LRS". 
+ :paramtype storage_account_type: str or + ~azure.mgmt.machinelearningservices.models.StorageAccountType + """ + super().__init__(**kwargs) + self.caching = caching + self.disk_size_gb = disk_size_gb + self.lun = lun + self.storage_account_type = storage_account_type + + +class ComputeInstanceDataMount(_serialization.Model): + """Defines an Aml Instance DataMount. + + :ivar source: Source of the ComputeInstance data mount. + :vartype source: str + :ivar source_type: Data source type. Known values are: "Dataset", "Datastore", and "URI". + :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.SourceType + :ivar mount_name: name of the ComputeInstance data mount. + :vartype mount_name: str + :ivar mount_action: Mount Action. Known values are: "Mount" and "Unmount". + :vartype mount_action: str or ~azure.mgmt.machinelearningservices.models.MountAction + :ivar created_by: who this data mount created by. + :vartype created_by: str + :ivar mount_path: Path of this data mount. + :vartype mount_path: str + :ivar mount_state: Mount state. Known values are: "MountRequested", "Mounted", "MountFailed", + "UnmountRequested", "UnmountFailed", and "Unmounted". + :vartype mount_state: str or ~azure.mgmt.machinelearningservices.models.MountState + :ivar mounted_on: The time when the disk mounted. + :vartype mounted_on: ~datetime.datetime + :ivar error: Error of this data mount. + :vartype error: str + """ + + _attribute_map = { + "source": {"key": "source", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + "mount_name": {"key": "mountName", "type": "str"}, + "mount_action": {"key": "mountAction", "type": "str"}, + "created_by": {"key": "createdBy", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, + "mount_state": {"key": "mountState", "type": "str"}, + "mounted_on": {"key": "mountedOn", "type": "iso-8601"}, + "error": {"key": "error", "type": "str"}, + } + + def __init__( + self, + *, + source: Optional[str] = None, + source_type: Optional[Union[str, "_models.SourceType"]] = None, + mount_name: Optional[str] = None, + mount_action: Optional[Union[str, "_models.MountAction"]] = None, + created_by: Optional[str] = None, + mount_path: Optional[str] = None, + mount_state: Optional[Union[str, "_models.MountState"]] = None, + mounted_on: Optional[datetime.datetime] = None, + error: Optional[str] = None, + **kwargs + ): + """ + :keyword source: Source of the ComputeInstance data mount. + :paramtype source: str + :keyword source_type: Data source type. Known values are: "Dataset", "Datastore", and "URI". + :paramtype source_type: str or ~azure.mgmt.machinelearningservices.models.SourceType + :keyword mount_name: name of the ComputeInstance data mount. + :paramtype mount_name: str + :keyword mount_action: Mount Action. Known values are: "Mount" and "Unmount". + :paramtype mount_action: str or ~azure.mgmt.machinelearningservices.models.MountAction + :keyword created_by: who this data mount created by. + :paramtype created_by: str + :keyword mount_path: Path of this data mount. + :paramtype mount_path: str + :keyword mount_state: Mount state. Known values are: "MountRequested", "Mounted", + "MountFailed", "UnmountRequested", "UnmountFailed", and "Unmounted". + :paramtype mount_state: str or ~azure.mgmt.machinelearningservices.models.MountState + :keyword mounted_on: The time when the disk mounted. + :paramtype mounted_on: ~datetime.datetime + :keyword error: Error of this data mount. 
+ :paramtype error: str + """ + super().__init__(**kwargs) + self.source = source + self.source_type = source_type + self.mount_name = mount_name + self.mount_action = mount_action + self.created_by = created_by + self.mount_path = mount_path + self.mount_state = mount_state + self.mounted_on = mounted_on + self.error = error + + +class ComputeInstanceEnvironmentInfo(_serialization.Model): + """Environment information. + + :ivar name: name of environment. + :vartype name: str + :ivar version: version of environment. + :vartype version: str + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "version": {"key": "version", "type": "str"}, + } + + def __init__(self, *, name: Optional[str] = None, version: Optional[str] = None, **kwargs): + """ + :keyword name: name of environment. + :paramtype name: str + :keyword version: version of environment. + :paramtype version: str + """ + super().__init__(**kwargs) + self.name = name + self.version = version + + +class ComputeInstanceLastOperation(_serialization.Model): + """The last operation on ComputeInstance. + + :ivar operation_name: Name of the last operation. Known values are: "Create", "Start", "Stop", + "Restart", "Reimage", and "Delete". + :vartype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName + :ivar operation_time: Time of the last operation. + :vartype operation_time: ~datetime.datetime + :ivar operation_status: Operation status. Known values are: "InProgress", "Succeeded", + "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", and + "DeleteFailed". + :vartype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus + :ivar operation_trigger: Trigger of operation. Known values are: "User", "Schedule", and + "IdleShutdown". + :vartype operation_trigger: str or ~azure.mgmt.machinelearningservices.models.OperationTrigger + """ + + _attribute_map = { + "operation_name": {"key": "operationName", "type": "str"}, + "operation_time": {"key": "operationTime", "type": "iso-8601"}, + "operation_status": {"key": "operationStatus", "type": "str"}, + "operation_trigger": {"key": "operationTrigger", "type": "str"}, + } + + def __init__( + self, + *, + operation_name: Optional[Union[str, "_models.OperationName"]] = None, + operation_time: Optional[datetime.datetime] = None, + operation_status: Optional[Union[str, "_models.OperationStatus"]] = None, + operation_trigger: Optional[Union[str, "_models.OperationTrigger"]] = None, + **kwargs + ): + """ + :keyword operation_name: Name of the last operation. Known values are: "Create", "Start", + "Stop", "Restart", "Reimage", and "Delete". + :paramtype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName + :keyword operation_time: Time of the last operation. + :paramtype operation_time: ~datetime.datetime + :keyword operation_status: Operation status. Known values are: "InProgress", "Succeeded", + "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", and + "DeleteFailed". + :paramtype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus + :keyword operation_trigger: Trigger of operation. Known values are: "User", "Schedule", and + "IdleShutdown". 
+ :paramtype operation_trigger: str or + ~azure.mgmt.machinelearningservices.models.OperationTrigger + """ + super().__init__(**kwargs) + self.operation_name = operation_name + self.operation_time = operation_time + self.operation_status = operation_status + self.operation_trigger = operation_trigger + + +class ComputeInstanceProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Compute Instance properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar vm_size: Virtual Machine Size. + :vartype vm_size: str + :ivar subnet: Virtual network subnet resource ID the compute nodes belong to. + :vartype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :ivar application_sharing_policy: Policy for sharing applications on this compute instance + among users of parent workspace. If Personal, only the creator can access applications on this + compute instance. When Shared, any workspace user can access applications on this instance + depending on his/her assigned role. Known values are: "Personal" and "Shared". + :vartype application_sharing_policy: str or + ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy + :ivar autologger_settings: Specifies settings for autologger. + :vartype autologger_settings: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceAutologgerSettings + :ivar ssh_settings: Specifies policy and settings for SSH access. + :vartype ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings + :ivar custom_services: List of Custom Services added to the compute. + :vartype custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :ivar os_image_metadata: Returns metadata about the operating system image for this compute + instance. + :vartype os_image_metadata: ~azure.mgmt.machinelearningservices.models.ImageMetadata + :ivar connectivity_endpoints: Describes all connectivity endpoints available for this + ComputeInstance. + :vartype connectivity_endpoints: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceConnectivityEndpoints + :ivar applications: Describes available applications and their endpoints on this + ComputeInstance. + :vartype applications: + list[~azure.mgmt.machinelearningservices.models.ComputeInstanceApplication] + :ivar created_by: Describes information on user who created this ComputeInstance. + :vartype created_by: ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy + :ivar errors: Collection of errors encountered on this ComputeInstance. + :vartype errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar state: The current state of this ComputeInstance. Known values are: "Creating", + "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed", + "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", and + "Unusable". + :vartype state: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceState + :ivar compute_instance_authorization_type: The Compute Instance Authorization type. Available + values are personal (default). "personal" + :vartype compute_instance_authorization_type: str or + ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType + :ivar personal_compute_instance_settings: Settings for a personal compute instance. 
+ :vartype personal_compute_instance_settings:
+ ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings
+ :ivar setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :vartype setup_scripts: ~azure.mgmt.machinelearningservices.models.SetupScripts
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation:
+ ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation
+ :ivar schedules: The list of schedules to be applied on the computes.
+ :vartype schedules: ~azure.mgmt.machinelearningservices.models.ComputeSchedules
+ :ivar idle_time_before_shutdown: Stops compute instance after user defined period of
+ inactivity. Time is defined in ISO8601 format. Minimum is 15 min, maximum is 3 days.
+ :vartype idle_time_before_shutdown: str
+ :ivar enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
+ values are: true - Indicates that the compute nodes will have public IPs provisioned. false -
+ Indicates that the compute nodes will have a private endpoint and no public IPs.
+ :vartype enable_node_public_ip: bool
+ :ivar containers: Describes information about the containers on this ComputeInstance.
+ :vartype containers: list[~azure.mgmt.machinelearningservices.models.ComputeInstanceContainer]
+ :ivar data_disks: Describes information about the dataDisks on this ComputeInstance.
+ :vartype data_disks: list[~azure.mgmt.machinelearningservices.models.ComputeInstanceDataDisk]
+ :ivar data_mounts: Describes information about the dataMounts on this ComputeInstance.
+ :vartype data_mounts: list[~azure.mgmt.machinelearningservices.models.ComputeInstanceDataMount]
+ :ivar versions: ComputeInstance version.
+ :vartype versions: ~azure.mgmt.machinelearningservices.models.ComputeInstanceVersion
+ """
+
+ _validation = {
+ "os_image_metadata": {"readonly": True},
+ "connectivity_endpoints": {"readonly": True},
+ "applications": {"readonly": True},
+ "created_by": {"readonly": True},
+ "errors": {"readonly": True},
+ "state": {"readonly": True},
+ "last_operation": {"readonly": True},
+ "schedules": {"readonly": True},
+ "containers": {"readonly": True},
+ "data_disks": {"readonly": True},
+ "data_mounts": {"readonly": True},
+ "versions": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "vm_size": {"key": "vmSize", "type": "str"},
+ "subnet": {"key": "subnet", "type": "ResourceId"},
+ "application_sharing_policy": {"key": "applicationSharingPolicy", "type": "str"},
+ "autologger_settings": {"key": "autologgerSettings", "type": "ComputeInstanceAutologgerSettings"},
+ "ssh_settings": {"key": "sshSettings", "type": "ComputeInstanceSshSettings"},
+ "custom_services": {"key": "customServices", "type": "[CustomService]"},
+ "os_image_metadata": {"key": "osImageMetadata", "type": "ImageMetadata"},
+ "connectivity_endpoints": {"key": "connectivityEndpoints", "type": "ComputeInstanceConnectivityEndpoints"},
+ "applications": {"key": "applications", "type": "[ComputeInstanceApplication]"},
+ "created_by": {"key": "createdBy", "type": "ComputeInstanceCreatedBy"},
+ "errors": {"key": "errors", "type": "[ErrorResponse]"},
+ "state": {"key": "state", "type": "str"},
+ "compute_instance_authorization_type": {"key": "computeInstanceAuthorizationType", "type": "str"},
+ "personal_compute_instance_settings": {
+ "key": "personalComputeInstanceSettings",
+ "type": "PersonalComputeInstanceSettings",
+ },
+ "setup_scripts": {"key": "setupScripts", "type": "SetupScripts"},
+ "last_operation": {"key":
"lastOperation", "type": "ComputeInstanceLastOperation"}, + "schedules": {"key": "schedules", "type": "ComputeSchedules"}, + "idle_time_before_shutdown": {"key": "idleTimeBeforeShutdown", "type": "str"}, + "enable_node_public_ip": {"key": "enableNodePublicIp", "type": "bool"}, + "containers": {"key": "containers", "type": "[ComputeInstanceContainer]"}, + "data_disks": {"key": "dataDisks", "type": "[ComputeInstanceDataDisk]"}, + "data_mounts": {"key": "dataMounts", "type": "[ComputeInstanceDataMount]"}, + "versions": {"key": "versions", "type": "ComputeInstanceVersion"}, + } + + def __init__( + self, + *, + vm_size: Optional[str] = None, + subnet: Optional["_models.ResourceId"] = None, + application_sharing_policy: Union[str, "_models.ApplicationSharingPolicy"] = "Shared", + autologger_settings: Optional["_models.ComputeInstanceAutologgerSettings"] = None, + ssh_settings: Optional["_models.ComputeInstanceSshSettings"] = None, + custom_services: Optional[List["_models.CustomService"]] = None, + compute_instance_authorization_type: Union[str, "_models.ComputeInstanceAuthorizationType"] = "personal", + personal_compute_instance_settings: Optional["_models.PersonalComputeInstanceSettings"] = None, + setup_scripts: Optional["_models.SetupScripts"] = None, + idle_time_before_shutdown: Optional[str] = None, + enable_node_public_ip: Optional[bool] = None, + **kwargs + ): + """ + :keyword vm_size: Virtual Machine Size. + :paramtype vm_size: str + :keyword subnet: Virtual network subnet resource ID the compute nodes belong to. + :paramtype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :keyword application_sharing_policy: Policy for sharing applications on this compute instance + among users of parent workspace. If Personal, only the creator can access applications on this + compute instance. When Shared, any workspace user can access applications on this instance + depending on his/her assigned role. Known values are: "Personal" and "Shared". + :paramtype application_sharing_policy: str or + ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy + :keyword autologger_settings: Specifies settings for autologger. + :paramtype autologger_settings: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceAutologgerSettings + :keyword ssh_settings: Specifies policy and settings for SSH access. + :paramtype ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings + :keyword custom_services: List of Custom Services added to the compute. + :paramtype custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :keyword compute_instance_authorization_type: The Compute Instance Authorization type. + Available values are personal (default). "personal" + :paramtype compute_instance_authorization_type: str or + ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType + :keyword personal_compute_instance_settings: Settings for a personal compute instance. + :paramtype personal_compute_instance_settings: + ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings + :keyword setup_scripts: Details of customized scripts to execute for setting up the cluster. + :paramtype setup_scripts: ~azure.mgmt.machinelearningservices.models.SetupScripts + :keyword idle_time_before_shutdown: Stops compute instance after user defined period of + inactivity. Time is defined in ISO8601 format. Minimum is 15 min, maximum is 3 days. 
+ :paramtype idle_time_before_shutdown: str
+ :keyword enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
+ values are: true - Indicates that the compute nodes will have public IPs provisioned. false -
+ Indicates that the compute nodes will have a private endpoint and no public IPs.
+ :paramtype enable_node_public_ip: bool
+ """
+ super().__init__(**kwargs)
+ self.vm_size = vm_size
+ self.subnet = subnet
+ self.application_sharing_policy = application_sharing_policy
+ self.autologger_settings = autologger_settings
+ self.ssh_settings = ssh_settings
+ self.custom_services = custom_services
+ self.os_image_metadata = None
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = compute_instance_authorization_type
+ self.personal_compute_instance_settings = personal_compute_instance_settings
+ self.setup_scripts = setup_scripts
+ self.last_operation = None
+ self.schedules = None
+ self.idle_time_before_shutdown = idle_time_before_shutdown
+ self.enable_node_public_ip = enable_node_public_ip
+ self.containers = None
+ self.data_disks = None
+ self.data_mounts = None
+ self.versions = None
+
+
+class ComputeInstanceSshSettings(_serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable. Known
+ values are: "Enabled" and "Disabled".
+ :vartype ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess
+ :ivar admin_user_name: Describes the admin user name.
+ :vartype admin_user_name: str
+ :ivar ssh_port: Describes the port for connecting through SSH.
+ :vartype ssh_port: int
+ :ivar admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs.
+ :vartype admin_public_key: str
+ """
+
+ _validation = {
+ "admin_user_name": {"readonly": True},
+ "ssh_port": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "ssh_public_access": {"key": "sshPublicAccess", "type": "str"},
+ "admin_user_name": {"key": "adminUserName", "type": "str"},
+ "ssh_port": {"key": "sshPort", "type": "int"},
+ "admin_public_key": {"key": "adminPublicKey", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ ssh_public_access: Union[str, "_models.SshPublicAccess"] = "Disabled",
+ admin_public_key: Optional[str] = None,
+ **kwargs
+ ):
+ """
+ :keyword ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable. Known
+ values are: "Enabled" and "Disabled".
+ :paramtype ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess
+ :keyword admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen
+ -t rsa -b 2048" to generate your SSH key pairs.
+ :paramtype admin_public_key: str + """ + super().__init__(**kwargs) + self.ssh_public_access = ssh_public_access + self.admin_user_name = None + self.ssh_port = None + self.admin_public_key = admin_public_key + + +class ComputeInstanceVersion(_serialization.Model): + """Version of computeInstance. + + :ivar runtime: Runtime of compute instance. + :vartype runtime: str + """ + + _attribute_map = { + "runtime": {"key": "runtime", "type": "str"}, + } + + def __init__(self, *, runtime: Optional[str] = None, **kwargs): + """ + :keyword runtime: Runtime of compute instance. + :paramtype runtime: str + """ + super().__init__(**kwargs) + self.runtime = runtime + + +class ComputeResourceSchema(_serialization.Model): + """ComputeResourceSchema. + + :ivar properties: Compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.Compute + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "Compute"}, + } + + def __init__(self, *, properties: Optional["_models.Compute"] = None, **kwargs): + """ + :keyword properties: Compute properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute + """ + super().__init__(**kwargs) + self.properties = properties + + +class ComputeResource(Resource, ComputeResourceSchema): + """Machine Learning compute object wrapped into ARM resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar properties: Compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.Compute + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar location: Specifies the location of the resource. + :vartype location: str + :ivar tags: Contains resource tags defined as key/value pairs. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "properties": {"key": "properties", "type": "Compute"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + properties: Optional["_models.Compute"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword properties: Compute properties. 
+ :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword location: Specifies the location of the resource. + :paramtype location: str + :keyword tags: Contains resource tags defined as key/value pairs. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super().__init__(properties=properties, **kwargs) + self.properties = properties + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.id = None + self.name = None + self.type = None + self.system_data = None + + +class ComputeSchedules(_serialization.Model): + """The list of schedules to be applied on the computes. + + :ivar compute_start_stop: The list of compute start stop schedules to be applied. + :vartype compute_start_stop: + list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] + """ + + _attribute_map = { + "compute_start_stop": {"key": "computeStartStop", "type": "[ComputeStartStopSchedule]"}, + } + + def __init__(self, *, compute_start_stop: Optional[List["_models.ComputeStartStopSchedule"]] = None, **kwargs): + """ + :keyword compute_start_stop: The list of compute start stop schedules to be applied. + :paramtype compute_start_stop: + list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] + """ + super().__init__(**kwargs) + self.compute_start_stop = compute_start_stop + + +class ComputeStartStopSchedule(_serialization.Model): + """Compute start stop schedule properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: A system assigned id for the schedule. + :vartype id: str + :ivar provisioning_status: The current deployment state of schedule. Known values are: + "Completed", "Provisioning", and "Failed". + :vartype provisioning_status: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningStatus + :ivar status: Is the schedule enabled or disabled?. Known values are: "Enabled" and "Disabled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus + :ivar action: [Required] The compute power action. Known values are: "Start" and "Stop". + :vartype action: str or ~azure.mgmt.machinelearningservices.models.ComputePowerAction + :ivar trigger_type: Known values are: "Recurrence" and "Cron". + :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType + :ivar recurrence: The workflow trigger recurrence for ComputeStartStop schedule type. + :vartype recurrence: ~azure.mgmt.machinelearningservices.models.Recurrence + :ivar cron: The workflow trigger cron for ComputeStartStop schedule type. + :vartype cron: ~azure.mgmt.machinelearningservices.models.Cron + :ivar schedule: [Deprecated] Not used any more. 
+ :vartype schedule: ~azure.mgmt.machinelearningservices.models.ScheduleBase + """ + + _validation = { + "id": {"readonly": True}, + "provisioning_status": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "provisioning_status": {"key": "provisioningStatus", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "action": {"key": "action", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + "recurrence": {"key": "recurrence", "type": "Recurrence"}, + "cron": {"key": "cron", "type": "Cron"}, + "schedule": {"key": "schedule", "type": "ScheduleBase"}, + } + + def __init__( + self, + *, + status: Optional[Union[str, "_models.ScheduleStatus"]] = None, + action: Optional[Union[str, "_models.ComputePowerAction"]] = None, + trigger_type: Optional[Union[str, "_models.TriggerType"]] = None, + recurrence: Optional["_models.Recurrence"] = None, + cron: Optional["_models.Cron"] = None, + schedule: Optional["_models.ScheduleBase"] = None, + **kwargs + ): + """ + :keyword status: Is the schedule enabled or disabled?. Known values are: "Enabled" and + "Disabled". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus + :keyword action: [Required] The compute power action. Known values are: "Start" and "Stop". + :paramtype action: str or ~azure.mgmt.machinelearningservices.models.ComputePowerAction + :keyword trigger_type: Known values are: "Recurrence" and "Cron". + :paramtype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType + :keyword recurrence: The workflow trigger recurrence for ComputeStartStop schedule type. + :paramtype recurrence: ~azure.mgmt.machinelearningservices.models.Recurrence + :keyword cron: The workflow trigger cron for ComputeStartStop schedule type. + :paramtype cron: ~azure.mgmt.machinelearningservices.models.Cron + :keyword schedule: [Deprecated] Not used any more. + :paramtype schedule: ~azure.mgmt.machinelearningservices.models.ScheduleBase + """ + super().__init__(**kwargs) + self.id = None + self.provisioning_status = None + self.status = status + self.action = action + self.trigger_type = trigger_type + self.recurrence = recurrence + self.cron = cron + self.schedule = schedule + + +class ContainerResourceRequirements(_serialization.Model): + """Resource requirements for each container instance within an online deployment. + + :ivar container_resource_limits: Container resource limit info:. + :vartype container_resource_limits: + ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings + :ivar container_resource_requests: Container resource request info:. + :vartype container_resource_requests: + ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings + """ + + _attribute_map = { + "container_resource_limits": {"key": "containerResourceLimits", "type": "ContainerResourceSettings"}, + "container_resource_requests": {"key": "containerResourceRequests", "type": "ContainerResourceSettings"}, + } + + def __init__( + self, + *, + container_resource_limits: Optional["_models.ContainerResourceSettings"] = None, + container_resource_requests: Optional["_models.ContainerResourceSettings"] = None, + **kwargs + ): + """ + :keyword container_resource_limits: Container resource limit info:. + :paramtype container_resource_limits: + ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings + :keyword container_resource_requests: Container resource request info:. 
+ :paramtype container_resource_requests: + ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings + """ + super().__init__(**kwargs) + self.container_resource_limits = container_resource_limits + self.container_resource_requests = container_resource_requests + + +class ContainerResourceSettings(_serialization.Model): + """ContainerResourceSettings. + + :ivar cpu: Number of vCPUs request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :vartype cpu: str + :ivar gpu: Number of Nvidia GPU cards request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :vartype gpu: str + :ivar memory: Memory size request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :vartype memory: str + """ + + _attribute_map = { + "cpu": {"key": "cpu", "type": "str"}, + "gpu": {"key": "gpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, + } + + def __init__(self, *, cpu: Optional[str] = None, gpu: Optional[str] = None, memory: Optional[str] = None, **kwargs): + """ + :keyword cpu: Number of vCPUs request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :paramtype cpu: str + :keyword gpu: Number of Nvidia GPU cards request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :paramtype gpu: str + :keyword memory: Memory size request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :paramtype memory: str + """ + super().__init__(**kwargs) + self.cpu = cpu + self.gpu = gpu + self.memory = memory + + +class CosmosDbSettings(_serialization.Model): + """CosmosDbSettings. + + :ivar collections_throughput: The throughput of the collections in cosmosdb database. + :vartype collections_throughput: int + """ + + _attribute_map = { + "collections_throughput": {"key": "collectionsThroughput", "type": "int"}, + } + + def __init__(self, *, collections_throughput: Optional[int] = None, **kwargs): + """ + :keyword collections_throughput: The throughput of the collections in cosmosdb database. + :paramtype collections_throughput: int + """ + super().__init__(**kwargs) + self.collections_throughput = collections_throughput + + +class Cron(_serialization.Model): + """The workflow trigger cron for ComputeStartStop schedule type. + + :ivar start_time: The start time in yyyy-MM-ddTHH:mm:ss format. + :vartype start_time: str + :ivar time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :vartype time_zone: str + :ivar expression: [Required] Specifies cron expression of schedule. + The expression should follow NCronTab format. + :vartype expression: str + """ + + _attribute_map = { + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "expression": {"key": "expression", "type": "str"}, + } + + def __init__( + self, *, start_time: Optional[str] = None, time_zone: str = "UTC", expression: Optional[str] = None, **kwargs + ): + """ + :keyword start_time: The start time in yyyy-MM-ddTHH:mm:ss format. 
+ :paramtype start_time: str + :keyword time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :paramtype time_zone: str + :keyword expression: [Required] Specifies cron expression of schedule. + The expression should follow NCronTab format. + :paramtype expression: str + """ + super().__init__(**kwargs) + self.start_time = start_time + self.time_zone = time_zone + self.expression = expression + + +class TriggerBase(_serialization.Model): + """TriggerBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CronTrigger, RecurrenceTrigger + + All required parameters must be populated in order to send to Azure. + + :ivar end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer + https://en.wikipedia.org/wiki/ISO_8601. + Recommented format would be "2022-06-01T00:00:01" + If not present, the schedule will run indefinitely. + :vartype end_time: str + :ivar start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC + offset. + :vartype start_time: str + :ivar time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :vartype time_zone: str + :ivar trigger_type: [Required]. Required. Known values are: "Recurrence" and "Cron". + :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType + """ + + _validation = { + "trigger_type": {"required": True}, + } + + _attribute_map = { + "end_time": {"key": "endTime", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + } + + _subtype_map = {"trigger_type": {"Cron": "CronTrigger", "Recurrence": "RecurrenceTrigger"}} + + def __init__( + self, *, end_time: Optional[str] = None, start_time: Optional[str] = None, time_zone: str = "UTC", **kwargs + ): + """ + :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer + https://en.wikipedia.org/wiki/ISO_8601. + Recommented format would be "2022-06-01T00:00:01" + If not present, the schedule will run indefinitely. + :paramtype end_time: str + :keyword start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC + offset. + :paramtype start_time: str + :keyword time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :paramtype time_zone: str + """ + super().__init__(**kwargs) + self.end_time = end_time + self.start_time = start_time + self.time_zone = time_zone + self.trigger_type = None # type: Optional[str] + + +class CronTrigger(TriggerBase): + """CronTrigger. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer + https://en.wikipedia.org/wiki/ISO_8601. + Recommented format would be "2022-06-01T00:00:01" + If not present, the schedule will run indefinitely. + :vartype end_time: str + :ivar start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC + offset. 
+ :vartype start_time: str + :ivar time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :vartype time_zone: str + :ivar trigger_type: [Required]. Required. Known values are: "Recurrence" and "Cron". + :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType + :ivar expression: [Required] Specifies cron expression of schedule. + The expression should follow NCronTab format. Required. + :vartype expression: str + """ + + _validation = { + "trigger_type": {"required": True}, + "expression": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "end_time": {"key": "endTime", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + "expression": {"key": "expression", "type": "str"}, + } + + def __init__( + self, + *, + expression: str, + end_time: Optional[str] = None, + start_time: Optional[str] = None, + time_zone: str = "UTC", + **kwargs + ): + """ + :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer + https://en.wikipedia.org/wiki/ISO_8601. + Recommended format would be "2022-06-01T00:00:01". + If not present, the schedule will run indefinitely. + :paramtype end_time: str + :keyword start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC + offset. + :paramtype start_time: str + :keyword time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :paramtype time_zone: str + :keyword expression: [Required] Specifies cron expression of schedule. + The expression should follow NCronTab format. Required. + :paramtype expression: str + """ + super().__init__(end_time=end_time, start_time=start_time, time_zone=time_zone, **kwargs) + self.trigger_type = "Cron" # type: str + self.expression = expression + + +class CsvExportSummary(ExportSummary): + """CsvExportSummary. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + :ivar container_name: The container name to which the labels will be exported. + :vartype container_name: str + :ivar snapshot_path: The output path where the labels will be exported.
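+# -- Editorial note (not generated code): a usage sketch for the CronTrigger model defined above. The expression follows NCronTab format; all values are illustrative:
+#
+#     trigger = CronTrigger(
+#         expression="0 9 * * 1-5",          # 09:00 on weekdays
+#         start_time="2022-06-01T00:00:01",  # ISO 8601, no UTC offset
+#         time_zone="UTC",                   # Windows time zone name
+#     )
+#
+# The trigger_type discriminator is set to "Cron" automatically by the constructor.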
+ :vartype snapshot_path: str + """ + + _validation = { + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "container_name": {"readonly": True}, + "snapshot_path": {"readonly": True}, + } + + _attribute_map = { + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "container_name": {"key": "containerName", "type": "str"}, + "snapshot_path": {"key": "snapshotPath", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.format = "CSV" # type: str + self.container_name = None + self.snapshot_path = None + + +class CustomForecastHorizon(ForecastHorizon): + """The desired maximum forecast horizon in units of time-series frequency. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Set forecast horizon value selection mode. Required. Known values are: + "Auto" and "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode + :ivar value: [Required] Forecast horizon value. Required. + :vartype value: int + """ + + _validation = { + "mode": {"required": True}, + "value": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, + } + + def __init__(self, *, value: int, **kwargs): + """ + :keyword value: [Required] Forecast horizon value. Required. + :paramtype value: int + """ + super().__init__(**kwargs) + self.mode = "Custom" # type: str + self.value = value + + +class JobInput(_serialization.Model): + """Command job definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CustomModelJobInput, LiteralJobInput, MLFlowModelJobInput, MLTableJobInput, + TritonModelJobInput, UriFileJobInput, UriFolderJobInput + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + """ + + _validation = { + "job_input_type": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + } + + _subtype_map = { + "job_input_type": { + "custom_model": "CustomModelJobInput", + "literal": "LiteralJobInput", + "mlflow_model": "MLFlowModelJobInput", + "mltable": "MLTableJobInput", + "triton_model": "TritonModelJobInput", + "uri_file": "UriFileJobInput", + "uri_folder": "UriFolderJobInput", + } + } + + def __init__(self, *, description: Optional[str] = None, **kwargs): + """ + :keyword description: Description for the input. + :paramtype description: str + """ + super().__init__(**kwargs) + self.description = description + self.job_input_type = None # type: Optional[str] + + +class CustomModelJobInput(AssetJobInput, JobInput): + """CustomModelJobInput. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str + """ + + _validation = { + "job_input_type": {"required": True}, + "uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + } + + def __init__( + self, + *, + uri: str, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs + ): + """ + :keyword description: Description for the input. + :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type = "custom_model" # type: str + self.mode = mode + self.uri = uri + + +class JobOutput(_serialization.Model): + """Job output definition container information on where to find job output/logs. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CustomModelJobOutput, MLFlowModelJobOutput, MLTableJobOutput, TritonModelJobOutput, + UriFileJobOutput, UriFolderJobOutput + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + """ + + _validation = { + "job_output_type": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + } + + _subtype_map = { + "job_output_type": { + "custom_model": "CustomModelJobOutput", + "mlflow_model": "MLFlowModelJobOutput", + "mltable": "MLTableJobOutput", + "triton_model": "TritonModelJobOutput", + "uri_file": "UriFileJobOutput", + "uri_folder": "UriFolderJobOutput", + } + } + + def __init__(self, *, description: Optional[str] = None, **kwargs): + """ + :keyword description: Description for the output. + :paramtype description: str + """ + super().__init__(**kwargs) + self.description = description + self.job_output_type = None # type: Optional[str] + + +class CustomModelJobOutput(AssetJobOutput, JobOutput): + """CustomModelJobOutput. 
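+# -- Editorial note (not generated code): a usage sketch for the JobInput hierarchy defined above. CustomModelJobInput only requires a uri; the datastore path and delivery mode below are illustrative assumptions:
+#
+#     model_input = CustomModelJobInput(
+#         uri="azureml://datastores/workspaceblobstore/paths/models/my-model",
+#         mode="ReadOnlyMount",  # InputDeliveryMode
+#         description="Custom model consumed by the job",
+#     )
+#
+# The job_input_type discriminator is set to "custom_model" by the constructor.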
+ + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str + """ + + _validation = { + "job_output_type": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + **kwargs + ): + """ + :keyword description: Description for the output. + :paramtype description: str + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_output_type = "custom_model" # type: str + self.mode = mode + self.uri = uri + + +class CustomNCrossValidations(NCrossValidations): + """N-Cross validations are specified by user. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Mode for determining N-Cross validations. Required. Known values are: + "Auto" and "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode + :ivar value: [Required] N-Cross validations value. Required. + :vartype value: int + """ + + _validation = { + "mode": {"required": True}, + "value": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, + } + + def __init__(self, *, value: int, **kwargs): + """ + :keyword value: [Required] N-Cross validations value. Required. + :paramtype value: int + """ + super().__init__(**kwargs) + self.mode = "Custom" # type: str + self.value = value + + +class CustomSeasonality(Seasonality): + """CustomSeasonality. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Seasonality mode. Required. Known values are: "Auto" and "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode + :ivar value: [Required] Seasonality value. Required. + :vartype value: int + """ + + _validation = { + "mode": {"required": True}, + "value": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, + } + + def __init__(self, *, value: int, **kwargs): + """ + :keyword value: [Required] Seasonality value. Required. + :paramtype value: int + """ + super().__init__(**kwargs) + self.mode = "Custom" # type: str + self.value = value + + +class CustomService(_serialization.Model): + """Specifies the custom service configuration. 
+ + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Name of the Custom Service. + :vartype name: str + :ivar image: Describes the Image Specifications. + :vartype image: ~azure.mgmt.machinelearningservices.models.Image + :ivar environment_variables: Environment Variable for the container. + :vartype environment_variables: dict[str, + ~azure.mgmt.machinelearningservices.models.EnvironmentVariable] + :ivar docker: Describes the docker settings for the image. + :vartype docker: ~azure.mgmt.machinelearningservices.models.Docker + :ivar endpoints: Configuring the endpoints for the container. + :vartype endpoints: list[~azure.mgmt.machinelearningservices.models.Endpoint] + :ivar volumes: Configuring the volumes for the container. + :vartype volumes: list[~azure.mgmt.machinelearningservices.models.VolumeDefinition] + """ + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "image": {"key": "image", "type": "Image"}, + "environment_variables": {"key": "environmentVariables", "type": "{EnvironmentVariable}"}, + "docker": {"key": "docker", "type": "Docker"}, + "endpoints": {"key": "endpoints", "type": "[Endpoint]"}, + "volumes": {"key": "volumes", "type": "[VolumeDefinition]"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + name: Optional[str] = None, + image: Optional["_models.Image"] = None, + environment_variables: Optional[Dict[str, "_models.EnvironmentVariable"]] = None, + docker: Optional["_models.Docker"] = None, + endpoints: Optional[List["_models.Endpoint"]] = None, + volumes: Optional[List["_models.VolumeDefinition"]] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Name of the Custom Service. + :paramtype name: str + :keyword image: Describes the Image Specifications. + :paramtype image: ~azure.mgmt.machinelearningservices.models.Image + :keyword environment_variables: Environment Variable for the container. + :paramtype environment_variables: dict[str, + ~azure.mgmt.machinelearningservices.models.EnvironmentVariable] + :keyword docker: Describes the docker settings for the image. + :paramtype docker: ~azure.mgmt.machinelearningservices.models.Docker + :keyword endpoints: Configuring the endpoints for the container. + :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.Endpoint] + :keyword volumes: Configuring the volumes for the container. + :paramtype volumes: list[~azure.mgmt.machinelearningservices.models.VolumeDefinition] + """ + super().__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.image = image + self.environment_variables = environment_variables + self.docker = docker + self.endpoints = endpoints + self.volumes = volumes + + +class CustomTargetLags(TargetLags): + """CustomTargetLags. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] Set target lags mode - Auto/Custom. Required. Known values are: "Auto" + and "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode + :ivar values: [Required] Set target lags values. Required. 
+ :vartype values: list[int] + """ + + _validation = { + "mode": {"required": True}, + "values": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "values": {"key": "values", "type": "[int]"}, + } + + def __init__(self, *, values: List[int], **kwargs): + """ + :keyword values: [Required] Set target lags values. Required. + :paramtype values: list[int] + """ + super().__init__(**kwargs) + self.mode = "Custom" # type: str + self.values = values + + +class CustomTargetRollingWindowSize(TargetRollingWindowSize): + """CustomTargetRollingWindowSize. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: [Required] TargetRollingWindowSize detection mode. Required. Known values are: + "Auto" and "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode + :ivar value: [Required] TargetRollingWindowSize value. Required. + :vartype value: int + """ + + _validation = { + "mode": {"required": True}, + "value": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, + } + + def __init__(self, *, value: int, **kwargs): + """ + :keyword value: [Required] TargetRollingWindowSize value. Required. + :paramtype value: int + """ + super().__init__(**kwargs) + self.mode = "Custom" # type: str + self.value = value + + +class DatabricksSchema(_serialization.Model): + """DatabricksSchema. + + :ivar properties: Properties of Databricks. + :vartype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "DatabricksProperties"}, + } + + def __init__(self, *, properties: Optional["_models.DatabricksProperties"] = None, **kwargs): + """ + :keyword properties: Properties of Databricks. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class Databricks(Compute, DatabricksSchema): # pylint: disable=too-many-instance-attributes + """A Databricks compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: Properties of Databricks. + :vartype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified.
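+# -- Editorial note (not generated code): usage sketches for the custom forecasting helpers defined above; the values are illustrative:
+#
+#     lags = CustomTargetLags(values=[1, 2, 3])         # mode becomes "Custom"
+#     window = CustomTargetRollingWindowSize(value=7)   # mode becomes "Custom"
+#
+# These would typically be assigned to the forecasting settings model elsewhere in this module (not shown in this hunk).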
+ :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. + :vartype disable_local_auth: bool + """ + + _validation = { + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, + } + + _attribute_map = { + "properties": {"key": "properties", "type": "DatabricksProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + } + + def __init__( + self, + *, + properties: Optional["_models.DatabricksProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs + ): + """ + :keyword properties: Properties of Databricks. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + """ + super().__init__(description=description, resource_id=resource_id, properties=properties, **kwargs) + self.properties = properties + self.compute_type = "Databricks" # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = None + + +class DatabricksComputeSecretsProperties(_serialization.Model): + """Properties of Databricks Compute Secrets. + + :ivar databricks_access_token: access token for databricks account. + :vartype databricks_access_token: str + """ + + _attribute_map = { + "databricks_access_token": {"key": "databricksAccessToken", "type": "str"}, + } + + def __init__(self, *, databricks_access_token: Optional[str] = None, **kwargs): + """ + :keyword databricks_access_token: access token for databricks account. 
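+# -- Editorial note (not generated code): a usage sketch for attaching a Databricks workspace with the Databricks compute model defined above. The resource ID, URL, and token below are placeholders only:
+#
+#     databricks_compute = Databricks(
+#         description="Attached Databricks workspace",
+#         resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Databricks/workspaces/<name>",
+#         properties=DatabricksProperties(
+#             workspace_url="https://adb-0000000000000000.0.azuredatabricks.net",
+#             databricks_access_token="<personal-access-token>",
+#         ),
+#     )
+#
+# DatabricksProperties is defined a few classes further down in this same hunk.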
+ :paramtype databricks_access_token: str + """ + super().__init__(**kwargs) + self.databricks_access_token = databricks_access_token + + +class DatabricksComputeSecrets(ComputeSecrets, DatabricksComputeSecretsProperties): + """Secrets related to a Machine Learning compute based on Databricks. + + All required parameters must be populated in order to send to Azure. + + :ivar databricks_access_token: access token for databricks account. + :vartype databricks_access_token: str + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + """ + + _validation = { + "compute_type": {"required": True}, + } + + _attribute_map = { + "databricks_access_token": {"key": "databricksAccessToken", "type": "str"}, + "compute_type": {"key": "computeType", "type": "str"}, + } + + def __init__(self, *, databricks_access_token: Optional[str] = None, **kwargs): + """ + :keyword databricks_access_token: access token for databricks account. + :paramtype databricks_access_token: str + """ + super().__init__(databricks_access_token=databricks_access_token, **kwargs) + self.databricks_access_token = databricks_access_token + self.compute_type = "Databricks" # type: str + + +class DatabricksProperties(_serialization.Model): + """Properties of Databricks. + + :ivar databricks_access_token: Databricks access token. + :vartype databricks_access_token: str + :ivar workspace_url: Workspace Url. + :vartype workspace_url: str + """ + + _attribute_map = { + "databricks_access_token": {"key": "databricksAccessToken", "type": "str"}, + "workspace_url": {"key": "workspaceUrl", "type": "str"}, + } + + def __init__(self, *, databricks_access_token: Optional[str] = None, workspace_url: Optional[str] = None, **kwargs): + """ + :keyword databricks_access_token: Databricks access token. + :paramtype databricks_access_token: str + :keyword workspace_url: Workspace Url. + :paramtype workspace_url: str + """ + super().__init__(**kwargs) + self.databricks_access_token = databricks_access_token + self.workspace_url = workspace_url + + +class DataContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DataContainerProperties"}, + } + + def __init__(self, *, properties: "_models.DataContainerProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class DataContainerProperties(AssetContainer): + """Container for data asset versions. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + """ + + _validation = { + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "data_type": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, + } + + def __init__( + self, + *, + data_type: Union[str, "_models.DataType"], + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: bool = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_type: [Required] Specifies the type of data. Required. Known values are: + "uri_file", "uri_folder", and "mltable". + :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + """ + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + self.data_type = data_type + + +class DataContainerResourceArmPaginatedResult(_serialization.Model): + """A paginated list of DataContainer entities. 
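+# -- Editorial note (not generated code): a usage sketch for DataContainer / DataContainerProperties as defined above; the description and tags are illustrative:
+#
+#     container = DataContainer(
+#         properties=DataContainerProperties(
+#             data_type="uri_folder",            # DataType
+#             description="Training data assets",
+#             tags={"team": "forecasting"},
+#         )
+#     )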
+ + :ivar next_link: The link to the next page of DataContainer objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type DataContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.DataContainer] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[DataContainer]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.DataContainer"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of DataContainer objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type DataContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataContainer] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class DataFactory(Compute): + """A DataFactory compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + """ + + _validation = { + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, + } + + _attribute_map = { + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + } + + def __init__(self, *, description: Optional[str] = None, resource_id: Optional[str] = None, **kwargs): + """ + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + """ + super().__init__(description=description, resource_id=resource_id, **kwargs) + self.compute_type = "DataFactory" # type: str + + +class DataLakeAnalyticsSchema(_serialization.Model): + """DataLakeAnalyticsSchema. + + :ivar properties: + :vartype properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "DataLakeAnalyticsSchemaProperties"}, + } + + def __init__(self, *, properties: Optional["_models.DataLakeAnalyticsSchemaProperties"] = None, **kwargs): + """ + :keyword properties: + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class DataLakeAnalytics(Compute, DataLakeAnalyticsSchema): # pylint: disable=too-many-instance-attributes + """A DataLakeAnalytics compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: + :vartype properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. 
+ :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. + :vartype disable_local_auth: bool + """ + + _validation = { + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, + } + + _attribute_map = { + "properties": {"key": "properties", "type": "DataLakeAnalyticsSchemaProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + } + + def __init__( + self, + *, + properties: Optional["_models.DataLakeAnalyticsSchemaProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs + ): + """ + :keyword properties: + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + """ + super().__init__(description=description, resource_id=resource_id, properties=properties, **kwargs) + self.properties = properties + self.compute_type = "DataLakeAnalytics" # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = None + + +class DataLakeAnalyticsSchemaProperties(_serialization.Model): + """DataLakeAnalyticsSchemaProperties. + + :ivar data_lake_store_account_name: DataLake Store Account Name. + :vartype data_lake_store_account_name: str + """ + + _attribute_map = { + "data_lake_store_account_name": {"key": "dataLakeStoreAccountName", "type": "str"}, + } + + def __init__(self, *, data_lake_store_account_name: Optional[str] = None, **kwargs): + """ + :keyword data_lake_store_account_name: DataLake Store Account Name. 
+ :paramtype data_lake_store_account_name: str + """ + super().__init__(**kwargs) + self.data_lake_store_account_name = data_lake_store_account_name + + +class DataPathAssetReference(AssetReferenceBase): + """Reference to an asset via its path in a datastore. + + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". + :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType + :ivar datastore_id: ARM resource ID of the datastore where the asset is located. + :vartype datastore_id: str + :ivar path: The path of the file/directory in the datastore. + :vartype path: str + """ + + _validation = { + "reference_type": {"required": True}, + } + + _attribute_map = { + "reference_type": {"key": "referenceType", "type": "str"}, + "datastore_id": {"key": "datastoreId", "type": "str"}, + "path": {"key": "path", "type": "str"}, + } + + def __init__(self, *, datastore_id: Optional[str] = None, path: Optional[str] = None, **kwargs): + """ + :keyword datastore_id: ARM resource ID of the datastore where the asset is located. + :paramtype datastore_id: str + :keyword path: The path of the file/directory in the datastore. + :paramtype path: str + """ + super().__init__(**kwargs) + self.reference_type = "DataPath" # type: str + self.datastore_id = datastore_id + self.path = path + + +class DatasetExportSummary(ExportSummary): + """DatasetExportSummary. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + :ivar labeled_asset_name: The unique name of the labeled data asset. + :vartype labeled_asset_name: str + """ + + _validation = { + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "labeled_asset_name": {"readonly": True}, + } + + _attribute_map = { + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "labeled_asset_name": {"key": "labeledAssetName", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.format = "Dataset" # type: str + self.labeled_asset_name = None + + +class Datastore(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. 
+ + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DatastoreProperties"}, + } + + def __init__(self, *, properties: "_models.DatastoreProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class DatastoreResourceArmPaginatedResult(_serialization.Model): + """A paginated list of Datastore entities. + + :ivar next_link: The link to the next page of Datastore objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type Datastore. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Datastore] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Datastore]"}, + } + + def __init__(self, *, next_link: Optional[str] = None, value: Optional[List["_models.Datastore"]] = None, **kwargs): + """ + :keyword next_link: The link to the next page of Datastore objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type Datastore. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.Datastore] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class DataVersionBase(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.DataVersionBaseProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DataVersionBaseProperties"}, + } + + def __init__(self, *, properties: "_models.DataVersionBaseProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.DataVersionBaseProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class DataVersionBaseProperties(AssetBase): + """Data version base definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MLTableData, UriFileDataVersion, UriFolderDataVersion + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20221001Preview.Assets.DataVersionBase.DataType. + Required. + :vartype data_uri: str + """ + + _validation = { + "data_type": {"required": True}, + "data_uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + } + + _subtype_map = { + "data_type": {"mltable": "MLTableData", "uri_file": "UriFileDataVersion", "uri_folder": "UriFolderDataVersion"} + } + + def __init__( + self, + *, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: bool = False, + is_archived: bool = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_uri: [Required] Uri of the data. 
Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20221001Preview.Assets.DataVersionBase.DataType. + Required. + :paramtype data_uri: str + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) + self.data_type = None # type: Optional[str] + self.data_uri = data_uri + + +class DataVersionBaseResourceArmPaginatedResult(_serialization.Model): + """A paginated list of DataVersionBase entities. + + :ivar next_link: The link to the next page of DataVersionBase objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type DataVersionBase. + :vartype value: list[~azure.mgmt.machinelearningservices.models.DataVersionBase] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[DataVersionBase]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.DataVersionBase"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of DataVersionBase objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type DataVersionBase. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataVersionBase] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class OnlineScaleSettings(_serialization.Model): + """Online deployment scaling configuration. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DefaultScaleSettings, TargetUtilizationScaleSettings + + All required parameters must be populated in order to send to Azure. + + :ivar scale_type: [Required] Type of deployment scaling algorithm. Required. Known values are: + "Default" and "TargetUtilization". + :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType + """ + + _validation = { + "scale_type": {"required": True}, + } + + _attribute_map = { + "scale_type": {"key": "scaleType", "type": "str"}, + } + + _subtype_map = { + "scale_type": {"Default": "DefaultScaleSettings", "TargetUtilization": "TargetUtilizationScaleSettings"} + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.scale_type = None # type: Optional[str] + + +class DefaultScaleSettings(OnlineScaleSettings): + """DefaultScaleSettings. + + All required parameters must be populated in order to send to Azure. + + :ivar scale_type: [Required] Type of deployment scaling algorithm. Required. Known values are: + "Default" and "TargetUtilization". + :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType + """ + + _validation = { + "scale_type": {"required": True}, + } + + _attribute_map = { + "scale_type": {"key": "scaleType", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.scale_type = "Default" # type: str + + +class DeploymentLogs(_serialization.Model): + """DeploymentLogs. + + :ivar content: The retrieved online deployment logs. + :vartype content: str + """ + + _attribute_map = { + "content": {"key": "content", "type": "str"}, + } + + def __init__(self, *, content: Optional[str] = None, **kwargs): + """ + :keyword content: The retrieved online deployment logs. 
+ :paramtype content: str + """ + super().__init__(**kwargs) + self.content = content + + +class DeploymentLogsRequest(_serialization.Model): + """DeploymentLogsRequest. + + :ivar container_type: The type of container to retrieve logs from. Known values are: + "StorageInitializer" and "InferenceServer". + :vartype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType + :ivar tail: The maximum number of lines to tail. + :vartype tail: int + """ + + _attribute_map = { + "container_type": {"key": "containerType", "type": "str"}, + "tail": {"key": "tail", "type": "int"}, + } + + def __init__( + self, + *, + container_type: Optional[Union[str, "_models.ContainerType"]] = None, + tail: Optional[int] = None, + **kwargs + ): + """ + :keyword container_type: The type of container to retrieve logs from. Known values are: + "StorageInitializer" and "InferenceServer". + :paramtype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType + :keyword tail: The maximum number of lines to tail. + :paramtype tail: int + """ + super().__init__(**kwargs) + self.container_type = container_type + self.tail = tail + + +class ResourceConfiguration(_serialization.Model): + """ResourceConfiguration. + + :ivar instance_count: Optional number of instances or nodes used by the compute target. + :vartype instance_count: int + :ivar instance_type: Optional type of VM used as supported by the compute target. + :vartype instance_type: str + :ivar properties: Additional properties bag. + :vartype properties: dict[str, JSON] + """ + + _attribute_map = { + "instance_count": {"key": "instanceCount", "type": "int"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "properties": {"key": "properties", "type": "{object}"}, + } + + def __init__( + self, + *, + instance_count: int = 1, + instance_type: Optional[str] = None, + properties: Optional[Dict[str, JSON]] = None, + **kwargs + ): + """ + :keyword instance_count: Optional number of instances or nodes used by the compute target. + :paramtype instance_count: int + :keyword instance_type: Optional type of VM used as supported by the compute target. + :paramtype instance_type: str + :keyword properties: Additional properties bag. + :paramtype properties: dict[str, JSON] + """ + super().__init__(**kwargs) + self.instance_count = instance_count + self.instance_type = instance_type + self.properties = properties + + +class DeploymentResourceConfiguration(ResourceConfiguration): + """DeploymentResourceConfiguration. + + :ivar instance_count: Optional number of instances or nodes used by the compute target. + :vartype instance_count: int + :ivar instance_type: Optional type of VM used as supported by the compute target. + :vartype instance_type: str + :ivar properties: Additional properties bag. + :vartype properties: dict[str, JSON] + """ + + _attribute_map = { + "instance_count": {"key": "instanceCount", "type": "int"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "properties": {"key": "properties", "type": "{object}"}, + } + + def __init__( + self, + *, + instance_count: int = 1, + instance_type: Optional[str] = None, + properties: Optional[Dict[str, JSON]] = None, + **kwargs + ): + """ + :keyword instance_count: Optional number of instances or nodes used by the compute target. + :paramtype instance_count: int + :keyword instance_type: Optional type of VM used as supported by the compute target. + :paramtype instance_type: str + :keyword properties: Additional properties bag. 
+ :paramtype properties: dict[str, JSON] + """ + super().__init__(instance_count=instance_count, instance_type=instance_type, properties=properties, **kwargs) + + +class DiagnoseRequestProperties(_serialization.Model): + """DiagnoseRequestProperties. + + :ivar udr: Setting for diagnosing user defined routing. + :vartype udr: dict[str, JSON] + :ivar nsg: Setting for diagnosing network security group. + :vartype nsg: dict[str, JSON] + :ivar resource_lock: Setting for diagnosing resource lock. + :vartype resource_lock: dict[str, JSON] + :ivar dns_resolution: Setting for diagnosing dns resolution. + :vartype dns_resolution: dict[str, JSON] + :ivar storage_account: Setting for diagnosing dependent storage account. + :vartype storage_account: dict[str, JSON] + :ivar key_vault: Setting for diagnosing dependent key vault. + :vartype key_vault: dict[str, JSON] + :ivar container_registry: Setting for diagnosing dependent container registry. + :vartype container_registry: dict[str, JSON] + :ivar application_insights: Setting for diagnosing dependent application insights. + :vartype application_insights: dict[str, JSON] + :ivar others: Setting for diagnosing unclassified category of problems. + :vartype others: dict[str, JSON] + """ + + _attribute_map = { + "udr": {"key": "udr", "type": "{object}"}, + "nsg": {"key": "nsg", "type": "{object}"}, + "resource_lock": {"key": "resourceLock", "type": "{object}"}, + "dns_resolution": {"key": "dnsResolution", "type": "{object}"}, + "storage_account": {"key": "storageAccount", "type": "{object}"}, + "key_vault": {"key": "keyVault", "type": "{object}"}, + "container_registry": {"key": "containerRegistry", "type": "{object}"}, + "application_insights": {"key": "applicationInsights", "type": "{object}"}, + "others": {"key": "others", "type": "{object}"}, + } + + def __init__( + self, + *, + udr: Optional[Dict[str, JSON]] = None, + nsg: Optional[Dict[str, JSON]] = None, + resource_lock: Optional[Dict[str, JSON]] = None, + dns_resolution: Optional[Dict[str, JSON]] = None, + storage_account: Optional[Dict[str, JSON]] = None, + key_vault: Optional[Dict[str, JSON]] = None, + container_registry: Optional[Dict[str, JSON]] = None, + application_insights: Optional[Dict[str, JSON]] = None, + others: Optional[Dict[str, JSON]] = None, + **kwargs + ): + """ + :keyword udr: Setting for diagnosing user defined routing. + :paramtype udr: dict[str, JSON] + :keyword nsg: Setting for diagnosing network security group. + :paramtype nsg: dict[str, JSON] + :keyword resource_lock: Setting for diagnosing resource lock. + :paramtype resource_lock: dict[str, JSON] + :keyword dns_resolution: Setting for diagnosing dns resolution. + :paramtype dns_resolution: dict[str, JSON] + :keyword storage_account: Setting for diagnosing dependent storage account. + :paramtype storage_account: dict[str, JSON] + :keyword key_vault: Setting for diagnosing dependent key vault. + :paramtype key_vault: dict[str, JSON] + :keyword container_registry: Setting for diagnosing dependent container registry. + :paramtype container_registry: dict[str, JSON] + :keyword application_insights: Setting for diagnosing dependent application insights. + :paramtype application_insights: dict[str, JSON] + :keyword others: Setting for diagnosing unclassified category of problems. 
+ :paramtype others: dict[str, JSON] + """ + super().__init__(**kwargs) + self.udr = udr + self.nsg = nsg + self.resource_lock = resource_lock + self.dns_resolution = dns_resolution + self.storage_account = storage_account + self.key_vault = key_vault + self.container_registry = container_registry + self.application_insights = application_insights + self.others = others + + +class DiagnoseResponseResult(_serialization.Model): + """DiagnoseResponseResult. + + :ivar value: + :vartype value: ~azure.mgmt.machinelearningservices.models.DiagnoseResponseResultValue + """ + + _attribute_map = { + "value": {"key": "value", "type": "DiagnoseResponseResultValue"}, + } + + def __init__(self, *, value: Optional["_models.DiagnoseResponseResultValue"] = None, **kwargs): + """ + :keyword value: + :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseResponseResultValue + """ + super().__init__(**kwargs) + self.value = value + + +class DiagnoseResponseResultValue(_serialization.Model): + """DiagnoseResponseResultValue. + + :ivar user_defined_route_results: + :vartype user_defined_route_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar network_security_rule_results: + :vartype network_security_rule_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar resource_lock_results: + :vartype resource_lock_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar dns_resolution_results: + :vartype dns_resolution_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar storage_account_results: + :vartype storage_account_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar key_vault_results: + :vartype key_vault_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar container_registry_results: + :vartype container_registry_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar application_insights_results: + :vartype application_insights_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar other_results: + :vartype other_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + """ + + _attribute_map = { + "user_defined_route_results": {"key": "userDefinedRouteResults", "type": "[DiagnoseResult]"}, + "network_security_rule_results": {"key": "networkSecurityRuleResults", "type": "[DiagnoseResult]"}, + "resource_lock_results": {"key": "resourceLockResults", "type": "[DiagnoseResult]"}, + "dns_resolution_results": {"key": "dnsResolutionResults", "type": "[DiagnoseResult]"}, + "storage_account_results": {"key": "storageAccountResults", "type": "[DiagnoseResult]"}, + "key_vault_results": {"key": "keyVaultResults", "type": "[DiagnoseResult]"}, + "container_registry_results": {"key": "containerRegistryResults", "type": "[DiagnoseResult]"}, + "application_insights_results": {"key": "applicationInsightsResults", "type": "[DiagnoseResult]"}, + "other_results": {"key": "otherResults", "type": "[DiagnoseResult]"}, + } + + def __init__( + self, + *, + user_defined_route_results: Optional[List["_models.DiagnoseResult"]] = None, + network_security_rule_results: Optional[List["_models.DiagnoseResult"]] = None, + resource_lock_results: Optional[List["_models.DiagnoseResult"]] = None, + dns_resolution_results: Optional[List["_models.DiagnoseResult"]] = None, + storage_account_results: Optional[List["_models.DiagnoseResult"]] = None, + key_vault_results: 
Optional[List["_models.DiagnoseResult"]] = None, + container_registry_results: Optional[List["_models.DiagnoseResult"]] = None, + application_insights_results: Optional[List["_models.DiagnoseResult"]] = None, + other_results: Optional[List["_models.DiagnoseResult"]] = None, + **kwargs + ): + """ + :keyword user_defined_route_results: + :paramtype user_defined_route_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword network_security_rule_results: + :paramtype network_security_rule_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword resource_lock_results: + :paramtype resource_lock_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword dns_resolution_results: + :paramtype dns_resolution_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword storage_account_results: + :paramtype storage_account_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword key_vault_results: + :paramtype key_vault_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword container_registry_results: + :paramtype container_registry_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword application_insights_results: + :paramtype application_insights_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword other_results: + :paramtype other_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + """ + super().__init__(**kwargs) + self.user_defined_route_results = user_defined_route_results + self.network_security_rule_results = network_security_rule_results + self.resource_lock_results = resource_lock_results + self.dns_resolution_results = dns_resolution_results + self.storage_account_results = storage_account_results + self.key_vault_results = key_vault_results + self.container_registry_results = container_registry_results + self.application_insights_results = application_insights_results + self.other_results = other_results + + +class DiagnoseResult(_serialization.Model): + """Result of Diagnose. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Code for workspace setup error. + :vartype code: str + :ivar level: Level of workspace setup error. Known values are: "Warning", "Error", and + "Information". + :vartype level: str or ~azure.mgmt.machinelearningservices.models.DiagnoseResultLevel + :ivar message: Message of workspace setup error. + :vartype message: str + """ + + _validation = { + "code": {"readonly": True}, + "level": {"readonly": True}, + "message": {"readonly": True}, + } + + _attribute_map = { + "code": {"key": "code", "type": "str"}, + "level": {"key": "level", "type": "str"}, + "message": {"key": "message", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.code = None + self.level = None + self.message = None + + +class DiagnoseWorkspaceParameters(_serialization.Model): + """Parameters to diagnose a workspace. + + :ivar value: Value of Parameters. + :vartype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties + """ + + _attribute_map = { + "value": {"key": "value", "type": "DiagnoseRequestProperties"}, + } + + def __init__(self, *, value: Optional["_models.DiagnoseRequestProperties"] = None, **kwargs): + """ + :keyword value: Value of Parameters. 
+ :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties + """ + super().__init__(**kwargs) + self.value = value + + +class DistributionConfiguration(_serialization.Model): + """Base definition for job distribution configuration. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + Mpi, PyTorch, TensorFlow + + All required parameters must be populated in order to send to Azure. + + :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. + Known values are: "PyTorch", "TensorFlow", and "Mpi". + :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType + """ + + _validation = { + "distribution_type": {"required": True}, + } + + _attribute_map = { + "distribution_type": {"key": "distributionType", "type": "str"}, + } + + _subtype_map = {"distribution_type": {"Mpi": "Mpi", "PyTorch": "PyTorch", "TensorFlow": "TensorFlow"}} + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.distribution_type = None # type: Optional[str] + + +class Docker(_serialization.Model): + """Docker. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar privileged: Indicate whether container shall run in privileged or non-privileged mode. + :vartype privileged: bool + """ + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "privileged": {"key": "privileged", "type": "bool"}, + } + + def __init__( + self, *, additional_properties: Optional[Dict[str, Any]] = None, privileged: Optional[bool] = None, **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword privileged: Indicate whether container shall run in privileged or non-privileged mode. + :paramtype privileged: bool + """ + super().__init__(**kwargs) + self.additional_properties = additional_properties + self.privileged = privileged + + +class EncryptionKeyVaultProperties(_serialization.Model): + """EncryptionKeyVaultProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar key_vault_arm_id: The ArmId of the keyVault where the customer owned encryption key is + present. Required. + :vartype key_vault_arm_id: str + :ivar key_identifier: Key vault uri to access the encryption key. Required. + :vartype key_identifier: str + :ivar identity_client_id: For future use - The client id of the identity which will be used to + access key vault. + :vartype identity_client_id: str + """ + + _validation = { + "key_vault_arm_id": {"required": True}, + "key_identifier": {"required": True}, + } + + _attribute_map = { + "key_vault_arm_id": {"key": "keyVaultArmId", "type": "str"}, + "key_identifier": {"key": "keyIdentifier", "type": "str"}, + "identity_client_id": {"key": "identityClientId", "type": "str"}, + } + + def __init__( + self, *, key_vault_arm_id: str, key_identifier: str, identity_client_id: Optional[str] = None, **kwargs + ): + """ + :keyword key_vault_arm_id: The ArmId of the keyVault where the customer owned encryption key is + present. Required. + :paramtype key_vault_arm_id: str + :keyword key_identifier: Key vault uri to access the encryption key. Required. 
+ :paramtype key_identifier: str + :keyword identity_client_id: For future use - The client id of the identity which will be used + to access key vault. + :paramtype identity_client_id: str + """ + super().__init__(**kwargs) + self.key_vault_arm_id = key_vault_arm_id + self.key_identifier = key_identifier + self.identity_client_id = identity_client_id + + +class EncryptionKeyVaultUpdateProperties(_serialization.Model): + """EncryptionKeyVaultUpdateProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar key_identifier: Key Vault uri to access the encryption key. Required. + :vartype key_identifier: str + """ + + _validation = { + "key_identifier": {"required": True}, + } + + _attribute_map = { + "key_identifier": {"key": "keyIdentifier", "type": "str"}, + } + + def __init__(self, *, key_identifier: str, **kwargs): + """ + :keyword key_identifier: Key Vault uri to access the encryption key. Required. + :paramtype key_identifier: str + """ + super().__init__(**kwargs) + self.key_identifier = key_identifier + + +class EncryptionProperty(_serialization.Model): + """EncryptionProperty. + + All required parameters must be populated in order to send to Azure. + + :ivar status: Indicates whether or not the encryption is enabled for the workspace. Required. + Known values are: "Enabled" and "Disabled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus + :ivar identity: The identity that will be used to access the key vault for encryption at rest. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk + :ivar key_vault_properties: Customer Key vault properties. Required. + :vartype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultProperties + """ + + _validation = { + "status": {"required": True}, + "key_vault_properties": {"required": True}, + } + + _attribute_map = { + "status": {"key": "status", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityForCmk"}, + "key_vault_properties": {"key": "keyVaultProperties", "type": "EncryptionKeyVaultProperties"}, + } + + def __init__( + self, + *, + status: Union[str, "_models.EncryptionStatus"], + key_vault_properties: "_models.EncryptionKeyVaultProperties", + identity: Optional["_models.IdentityForCmk"] = None, + **kwargs + ): + """ + :keyword status: Indicates whether or not the encryption is enabled for the workspace. + Required. Known values are: "Enabled" and "Disabled". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus + :keyword identity: The identity that will be used to access the key vault for encryption at + rest. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk + :keyword key_vault_properties: Customer Key vault properties. Required. + :paramtype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultProperties + """ + super().__init__(**kwargs) + self.status = status + self.identity = identity + self.key_vault_properties = key_vault_properties + + +class EncryptionUpdateProperties(_serialization.Model): + """EncryptionUpdateProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar key_vault_properties: Customer Key vault properties. Required. 
+ :vartype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties + """ + + _validation = { + "key_vault_properties": {"required": True}, + } + + _attribute_map = { + "key_vault_properties": {"key": "keyVaultProperties", "type": "EncryptionKeyVaultUpdateProperties"}, + } + + def __init__(self, *, key_vault_properties: "_models.EncryptionKeyVaultUpdateProperties", **kwargs): + """ + :keyword key_vault_properties: Customer Key vault properties. Required. + :paramtype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties + """ + super().__init__(**kwargs) + self.key_vault_properties = key_vault_properties + + +class Endpoint(_serialization.Model): + """Endpoint. + + :ivar protocol: Protocol over which communication will happen over this endpoint. Known values + are: "tcp", "udp", and "http". + :vartype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol + :ivar name: Name of the Endpoint. + :vartype name: str + :ivar target: Application port inside the container. + :vartype target: int + :ivar published: Port over which the application is exposed from container. + :vartype published: int + :ivar host_ip: Host IP over which the application is exposed from the container. + :vartype host_ip: str + """ + + _attribute_map = { + "protocol": {"key": "protocol", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "target": {"key": "target", "type": "int"}, + "published": {"key": "published", "type": "int"}, + "host_ip": {"key": "hostIp", "type": "str"}, + } + + def __init__( + self, + *, + protocol: Union[str, "_models.Protocol"] = "tcp", + name: Optional[str] = None, + target: Optional[int] = None, + published: Optional[int] = None, + host_ip: Optional[str] = None, + **kwargs + ): + """ + :keyword protocol: Protocol over which communication will happen over this endpoint. Known + values are: "tcp", "udp", and "http". + :paramtype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol + :keyword name: Name of the Endpoint. + :paramtype name: str + :keyword target: Application port inside the container. + :paramtype target: int + :keyword published: Port over which the application is exposed from container. + :paramtype published: int + :keyword host_ip: Host IP over which the application is exposed from the container. + :paramtype host_ip: str + """ + super().__init__(**kwargs) + self.protocol = protocol + self.name = name + self.target = target + self.published = published + self.host_ip = host_ip + + +class EndpointAuthKeys(_serialization.Model): + """Keys for endpoint authentication. + + :ivar primary_key: The primary key. + :vartype primary_key: str + :ivar secondary_key: The secondary key. + :vartype secondary_key: str + """ + + _attribute_map = { + "primary_key": {"key": "primaryKey", "type": "str"}, + "secondary_key": {"key": "secondaryKey", "type": "str"}, + } + + def __init__(self, *, primary_key: Optional[str] = None, secondary_key: Optional[str] = None, **kwargs): + """ + :keyword primary_key: The primary key. + :paramtype primary_key: str + :keyword secondary_key: The secondary key. + :paramtype secondary_key: str + """ + super().__init__(**kwargs) + self.primary_key = primary_key + self.secondary_key = secondary_key + + +class EndpointAuthToken(_serialization.Model): + """Service Token. + + :ivar access_token: Access token for endpoint authentication. + :vartype access_token: str + :ivar expiry_time_utc: Access token expiry time (UTC). 
+ :vartype expiry_time_utc: int + :ivar refresh_after_time_utc: Refresh access token after time (UTC). + :vartype refresh_after_time_utc: int + :ivar token_type: Access token type. + :vartype token_type: str + """ + + _attribute_map = { + "access_token": {"key": "accessToken", "type": "str"}, + "expiry_time_utc": {"key": "expiryTimeUtc", "type": "int"}, + "refresh_after_time_utc": {"key": "refreshAfterTimeUtc", "type": "int"}, + "token_type": {"key": "tokenType", "type": "str"}, + } + + def __init__( + self, + *, + access_token: Optional[str] = None, + expiry_time_utc: int = 0, + refresh_after_time_utc: int = 0, + token_type: Optional[str] = None, + **kwargs + ): + """ + :keyword access_token: Access token for endpoint authentication. + :paramtype access_token: str + :keyword expiry_time_utc: Access token expiry time (UTC). + :paramtype expiry_time_utc: int + :keyword refresh_after_time_utc: Refresh access token after time (UTC). + :paramtype refresh_after_time_utc: int + :keyword token_type: Access token type. + :paramtype token_type: str + """ + super().__init__(**kwargs) + self.access_token = access_token + self.expiry_time_utc = expiry_time_utc + self.refresh_after_time_utc = refresh_after_time_utc + self.token_type = token_type + + + class ScheduleActionBase(_serialization.Model): + """ScheduleActionBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + JobScheduleAction, EndpointScheduleAction + + All required parameters must be populated in order to send to Azure. + + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob" and "InvokeBatchEndpoint". + :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType + """ + + _validation = { + "action_type": {"required": True}, + } + + _attribute_map = { + "action_type": {"key": "actionType", "type": "str"}, + } + + _subtype_map = {"action_type": {"CreateJob": "JobScheduleAction", "InvokeBatchEndpoint": "EndpointScheduleAction"}} + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.action_type = None # type: Optional[str] + + + class EndpointScheduleAction(ScheduleActionBase): + """EndpointScheduleAction. + + All required parameters must be populated in order to send to Azure. + + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob" and "InvokeBatchEndpoint". + :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType + :ivar endpoint_invocation_definition: [Required] Defines Schedule action definition details. Required. + :vartype endpoint_invocation_definition: JSON + """ + + _validation = { + "action_type": {"required": True}, + "endpoint_invocation_definition": {"required": True}, + } + + _attribute_map = { + "action_type": {"key": "actionType", "type": "str"}, + "endpoint_invocation_definition": {"key": "endpointInvocationDefinition", "type": "object"}, + } + + def __init__(self, *, endpoint_invocation_definition: JSON, **kwargs): + """ + :keyword endpoint_invocation_definition: [Required] Defines Schedule action definition details. Required.
+ :paramtype endpoint_invocation_definition: JSON + """ + super().__init__(**kwargs) + self.action_type = "InvokeBatchEndpoint" # type: str + self.endpoint_invocation_definition = endpoint_invocation_definition + + +class EnvironmentContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentContainerProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "EnvironmentContainerProperties"}, + } + + def __init__(self, *, properties: "_models.EnvironmentContainerProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.EnvironmentContainerProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class EnvironmentContainerProperties(AssetContainer): + """Container for environment specification versions. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + :ivar provisioning_state: Provisioning state for the environment container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
+ :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + """ + + _validation = { + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: bool = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + self.provisioning_state = None + + +class EnvironmentContainerResourceArmPaginatedResult(_serialization.Model): + """A paginated list of EnvironmentContainer entities. + + :ivar next_link: The link to the next page of EnvironmentContainer objects. If null, there are + no additional pages. + :vartype next_link: str + :ivar value: An array of objects of type EnvironmentContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[EnvironmentContainer]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.EnvironmentContainer"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of EnvironmentContainer objects. If null, there + are no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type EnvironmentContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class EnvironmentVariable(_serialization.Model): + """EnvironmentVariable. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Type of the Environment Variable. Possible values are: local - For local variable. + "local" + :vartype type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentVariableType + :ivar value: Value of the Environment variable. + :vartype value: str + """ + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "str"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + type: Union[str, "_models.EnvironmentVariableType"] = "local", + value: Optional[str] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :paramtype additional_properties: dict[str, any] + :keyword type: Type of the Environment Variable. Possible values are: local - For local + variable. "local" + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentVariableType + :keyword value: Value of the Environment variable. + :paramtype value: str + """ + super().__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.value = value + + + class EnvironmentVersion(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentVersionProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "EnvironmentVersionProperties"}, + } + + def __init__(self, *, properties: "_models.EnvironmentVersionProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentVersionProperties + """ + super().__init__(**kwargs) + self.properties = properties + + + class EnvironmentVersionProperties(AssetBase): # pylint: disable=too-many-instance-attributes + """Environment version details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar build: Configuration settings for Docker build context. + :vartype build: ~azure.mgmt.machinelearningservices.models.BuildContext + :ivar conda_file: Standard configuration file used by Conda that lets you install any kind of + package, including Python, R, and C/C++ packages. + :vartype conda_file: str + :ivar environment_type: Environment type is either user managed or curated by the Azure ML + service. Known values are: "Curated" and "UserCreated".
+ :vartype environment_type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentType + :ivar image: Name of the image that will be used for the environment. + :vartype image: str + :ivar inference_config: Defines configuration specific to inference. + :vartype inference_config: + ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties + :ivar os_type: The OS type of the environment. Known values are: "Linux" and "Windows". + :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType + :ivar provisioning_state: Provisioning state for the environment version. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + """ + + _validation = { + "environment_type": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "build": {"key": "build", "type": "BuildContext"}, + "conda_file": {"key": "condaFile", "type": "str"}, + "environment_type": {"key": "environmentType", "type": "str"}, + "image": {"key": "image", "type": "str"}, + "inference_config": {"key": "inferenceConfig", "type": "InferenceContainerProperties"}, + "os_type": {"key": "osType", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: bool = False, + is_archived: bool = False, + build: Optional["_models.BuildContext"] = None, + conda_file: Optional[str] = None, + image: Optional[str] = None, + inference_config: Optional["_models.InferenceContainerProperties"] = None, + os_type: Optional[Union[str, "_models.OperatingSystemType"]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword build: Configuration settings for Docker build context. + :paramtype build: ~azure.mgmt.machinelearningservices.models.BuildContext + :keyword conda_file: Standard configuration file used by Conda that lets you install any kind + of package, including Python, R, and C/C++ packages. + :paramtype conda_file: str + :keyword image: Name of the image that will be used for the environment. + :paramtype image: str + :keyword inference_config: Defines configuration specific to inference. + :paramtype inference_config: + ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties + :keyword os_type: The OS type of the environment. Known values are: "Linux" and "Windows".
+ :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) + self.build = build + self.conda_file = conda_file + self.environment_type = None + self.image = image + self.inference_config = inference_config + self.os_type = os_type + self.provisioning_state = None + + +class EnvironmentVersionResourceArmPaginatedResult(_serialization.Model): + """A paginated list of EnvironmentVersion entities. + + :ivar next_link: The link to the next page of EnvironmentVersion objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type EnvironmentVersion. + :vartype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[EnvironmentVersion]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.EnvironmentVersion"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of EnvironmentVersion objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type EnvironmentVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ErrorAdditionalInfo(_serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: JSON + """ + + _validation = { + "type": {"readonly": True}, + "info": {"readonly": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "info": {"key": "info", "type": "object"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorDetail(_serialization.Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.machinelearningservices.models.ErrorDetail] + :ivar additional_info: The error additional info. 
+ :vartype additional_info: list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo] + """ + + _validation = { + "code": {"readonly": True}, + "message": {"readonly": True}, + "target": {"readonly": True}, + "details": {"readonly": True}, + "additional_info": {"readonly": True}, + } + + _attribute_map = { + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "details": {"key": "details", "type": "[ErrorDetail]"}, + "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class ErrorResponse(_serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). + + :ivar error: The error object. + :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorDetail + """ + + _attribute_map = { + "error": {"key": "error", "type": "ErrorDetail"}, + } + + def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs): + """ + :keyword error: The error object. + :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorDetail + """ + super().__init__(**kwargs) + self.error = error + + +class EstimatedVMPrice(_serialization.Model): + """The estimated price info for using a VM of a particular OS type, tier, etc. + + All required parameters must be populated in order to send to Azure. + + :ivar retail_price: The price charged for using the VM. Required. + :vartype retail_price: float + :ivar os_type: Operating system type used by the VM. Required. Known values are: "Linux" and + "Windows". + :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType + :ivar vm_tier: The type of the VM. Required. Known values are: "Standard", "LowPriority", and + "Spot". + :vartype vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier + """ + + _validation = { + "retail_price": {"required": True}, + "os_type": {"required": True}, + "vm_tier": {"required": True}, + } + + _attribute_map = { + "retail_price": {"key": "retailPrice", "type": "float"}, + "os_type": {"key": "osType", "type": "str"}, + "vm_tier": {"key": "vmTier", "type": "str"}, + } + + def __init__( + self, + *, + retail_price: float, + os_type: Union[str, "_models.VMPriceOSType"], + vm_tier: Union[str, "_models.VMTier"], + **kwargs + ): + """ + :keyword retail_price: The price charged for using the VM. Required. + :paramtype retail_price: float + :keyword os_type: Operating system type used by the VM. Required. Known values are: "Linux" and + "Windows". + :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType + :keyword vm_tier: The type of the VM. Required. Known values are: "Standard", "LowPriority", + and "Spot". + :paramtype vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier + """ + super().__init__(**kwargs) + self.retail_price = retail_price + self.os_type = os_type + self.vm_tier = vm_tier + + +class EstimatedVMPrices(_serialization.Model): + """The estimated price info for using a VM. + + All required parameters must be populated in order to send to Azure. + + :ivar billing_currency: Three lettered code specifying the currency of the VM price. Example: + USD. Required. 
"USD" + :vartype billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency + :ivar unit_of_measure: The unit of time measurement for the specified VM price. Example: + OneHour. Required. "OneHour" + :vartype unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure + :ivar values: The list of estimated prices for using a VM of a particular OS type, tier, etc. + Required. + :vartype values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] + """ + + _validation = { + "billing_currency": {"required": True}, + "unit_of_measure": {"required": True}, + "values": {"required": True}, + } + + _attribute_map = { + "billing_currency": {"key": "billingCurrency", "type": "str"}, + "unit_of_measure": {"key": "unitOfMeasure", "type": "str"}, + "values": {"key": "values", "type": "[EstimatedVMPrice]"}, + } + + def __init__( + self, + *, + billing_currency: Union[str, "_models.BillingCurrency"], + unit_of_measure: Union[str, "_models.UnitOfMeasure"], + values: List["_models.EstimatedVMPrice"], + **kwargs + ): + """ + :keyword billing_currency: Three lettered code specifying the currency of the VM price. + Example: USD. Required. "USD" + :paramtype billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency + :keyword unit_of_measure: The unit of time measurement for the specified VM price. Example: + OneHour. Required. "OneHour" + :paramtype unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure + :keyword values: The list of estimated prices for using a VM of a particular OS type, tier, + etc. Required. + :paramtype values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] + """ + super().__init__(**kwargs) + self.billing_currency = billing_currency + self.unit_of_measure = unit_of_measure + self.values = values + + +class ExternalFQDNResponse(_serialization.Model): + """ExternalFQDNResponse. + + :ivar value: + :vartype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoints] + """ + + _attribute_map = { + "value": {"key": "value", "type": "[FQDNEndpoints]"}, + } + + def __init__(self, *, value: Optional[List["_models.FQDNEndpoints"]] = None, **kwargs): + """ + :keyword value: + :paramtype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoints] + """ + super().__init__(**kwargs) + self.value = value + + +class FeaturizationSettings(_serialization.Model): + """Featurization Configuration. + + :ivar dataset_language: Dataset language, useful for the text data. + :vartype dataset_language: str + """ + + _attribute_map = { + "dataset_language": {"key": "datasetLanguage", "type": "str"}, + } + + def __init__(self, *, dataset_language: Optional[str] = None, **kwargs): + """ + :keyword dataset_language: Dataset language, useful for the text data. + :paramtype dataset_language: str + """ + super().__init__(**kwargs) + self.dataset_language = dataset_language + + +class FlavorData(_serialization.Model): + """FlavorData. + + :ivar data: Model flavor-specific data. + :vartype data: dict[str, str] + """ + + _attribute_map = { + "data": {"key": "data", "type": "{str}"}, + } + + def __init__(self, *, data: Optional[Dict[str, str]] = None, **kwargs): + """ + :keyword data: Model flavor-specific data. + :paramtype data: dict[str, str] + """ + super().__init__(**kwargs) + self.data = data + + +class Forecasting(TableVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Forecasting task in AutoML Table vertical. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar cv_split_column_names: Columns to use for CVSplit data. + :vartype cv_split_column_names: list[str] + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset + when validation dataset is not provided. + :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :ivar test_data: Test data input. + :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype test_data_size: float + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :vartype weight_column_name: str + :ivar forecasting_settings: Forecasting task specific inputs. + :vartype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings + :ivar primary_metric: Primary metric for forecasting task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and + "NormalizedMeanAbsoluteError". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics + :ivar training_settings: Inputs for training phase for an AutoML Job. + :vartype training_settings: + ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "forecasting_settings": {"key": "forecastingSettings", "type": "ForecastingSettings"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", "type": "ForecastingTrainingSettings"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + cv_split_column_names: Optional[List[str]] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, + test_data_size: Optional[float] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + weight_column_name: Optional[str] = None, + forecasting_settings: Optional["_models.ForecastingSettings"] = None, + primary_metric: Optional[Union[str, "_models.ForecastingPrimaryMetrics"]] = None, + training_settings: Optional["_models.ForecastingTrainingSettings"] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. 
+ :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword cv_split_column_names: Columns to use for CVSplit data. + :paramtype cv_split_column_names: list[str] + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :keyword n_cross_validations: Number of cross validation folds to be applied on training + dataset + when validation dataset is not provided. + :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :keyword test_data: Test data input. + :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype test_data_size: float + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :paramtype weight_column_name: str + :keyword forecasting_settings: Forecasting task specific inputs. + :paramtype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings + :keyword primary_metric: Primary metric for forecasting task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and + "NormalizedMeanAbsoluteError". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics + :keyword training_settings: Inputs for training phase for an AutoML Job. 
+ :paramtype training_settings:
+ ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings
+ """
+ super().__init__(
+ cv_split_column_names=cv_split_column_names,
+ featurization_settings=featurization_settings,
+ fixed_parameters=fixed_parameters,
+ limit_settings=limit_settings,
+ n_cross_validations=n_cross_validations,
+ search_space=search_space,
+ sweep_settings=sweep_settings,
+ test_data=test_data,
+ test_data_size=test_data_size,
+ validation_data=validation_data,
+ validation_data_size=validation_data_size,
+ weight_column_name=weight_column_name,
+ log_verbosity=log_verbosity,
+ target_column_name=target_column_name,
+ training_data=training_data,
+ **kwargs
+ )
+ self.log_verbosity = log_verbosity
+ self.target_column_name = target_column_name
+ self.task_type = "Forecasting" # type: str
+ self.training_data = training_data
+ self.forecasting_settings = forecasting_settings
+ self.primary_metric = primary_metric
+ self.training_settings = training_settings
+ self.cv_split_column_names = cv_split_column_names
+ self.featurization_settings = featurization_settings
+ self.fixed_parameters = fixed_parameters
+ self.limit_settings = limit_settings
+ self.n_cross_validations = n_cross_validations
+ self.search_space = search_space
+ self.sweep_settings = sweep_settings
+ self.test_data = test_data
+ self.test_data_size = test_data_size
+ self.validation_data = validation_data
+ self.validation_data_size = validation_data_size
+ self.weight_column_name = weight_column_name
+
+
+class ForecastingSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes
+ """Forecasting specific parameters.
+
+ :ivar country_or_region_for_holidays: Country or region for holidays for forecasting tasks.
+ These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'.
+ :vartype country_or_region_for_holidays: str
+ :ivar cv_step_size: Number of periods between the origin time of one CV fold and the next fold.
+ For example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be
+ three days apart.
+ :vartype cv_step_size: int
+ :ivar feature_lags: Flag for generating lags for the numeric features with 'auto' or null.
+ Known values are: "None" and "Auto".
+ :vartype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags
+ :ivar forecast_horizon: The desired maximum forecast horizon in units of time-series frequency.
+ :vartype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon
+ :ivar frequency: When forecasting, this parameter represents the period with which the forecast
+ is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset frequency
+ by default.
+ :vartype frequency: str
+ :ivar seasonality: Set time series seasonality as an integer multiple of the series frequency.
+ If seasonality is set to 'auto', it will be inferred.
+ :vartype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality
+ :ivar short_series_handling_config: The parameter defining how AutoML should handle short
+ time series. Known values are: "None", "Auto", "Pad", and "Drop".
+ :vartype short_series_handling_config: str or
+ ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration
+ :ivar target_aggregate_function: The function to be used to aggregate the time series target
+ column to conform to a user-specified frequency.
+ If the TargetAggregateFunction is set (i.e. not 'None') but the freq parameter is not set, an
+ error is raised.
The possible target aggregation functions are: "sum", "max", "min" and "mean". + Known values are: "None", "Sum", "Max", "Min", and "Mean". + :vartype target_aggregate_function: str or + ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction + :ivar target_lags: The number of past periods to lag from the target column. + :vartype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags + :ivar target_rolling_window_size: The number of past periods used to create a rolling window + average of the target column. + :vartype target_rolling_window_size: + ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize + :ivar time_column_name: The name of the time column. This parameter is required when + forecasting to specify the datetime column in the input data used for building the time series + and inferring its frequency. + :vartype time_column_name: str + :ivar time_series_id_column_names: The names of columns used to group a timeseries. It can be + used to create multiple series. + If grain is not defined, the data set is assumed to be one time-series. This parameter is used + with task type forecasting. + :vartype time_series_id_column_names: list[str] + :ivar use_stl: Configure STL Decomposition of the time-series target column. Known values are: + "None", "Season", and "SeasonTrend". + :vartype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl + """ + + _attribute_map = { + "country_or_region_for_holidays": {"key": "countryOrRegionForHolidays", "type": "str"}, + "cv_step_size": {"key": "cvStepSize", "type": "int"}, + "feature_lags": {"key": "featureLags", "type": "str"}, + "forecast_horizon": {"key": "forecastHorizon", "type": "ForecastHorizon"}, + "frequency": {"key": "frequency", "type": "str"}, + "seasonality": {"key": "seasonality", "type": "Seasonality"}, + "short_series_handling_config": {"key": "shortSeriesHandlingConfig", "type": "str"}, + "target_aggregate_function": {"key": "targetAggregateFunction", "type": "str"}, + "target_lags": {"key": "targetLags", "type": "TargetLags"}, + "target_rolling_window_size": {"key": "targetRollingWindowSize", "type": "TargetRollingWindowSize"}, + "time_column_name": {"key": "timeColumnName", "type": "str"}, + "time_series_id_column_names": {"key": "timeSeriesIdColumnNames", "type": "[str]"}, + "use_stl": {"key": "useStl", "type": "str"}, + } + + def __init__( + self, + *, + country_or_region_for_holidays: Optional[str] = None, + cv_step_size: Optional[int] = None, + feature_lags: Optional[Union[str, "_models.FeatureLags"]] = None, + forecast_horizon: Optional["_models.ForecastHorizon"] = None, + frequency: Optional[str] = None, + seasonality: Optional["_models.Seasonality"] = None, + short_series_handling_config: Optional[Union[str, "_models.ShortSeriesHandlingConfiguration"]] = None, + target_aggregate_function: Optional[Union[str, "_models.TargetAggregationFunction"]] = None, + target_lags: Optional["_models.TargetLags"] = None, + target_rolling_window_size: Optional["_models.TargetRollingWindowSize"] = None, + time_column_name: Optional[str] = None, + time_series_id_column_names: Optional[List[str]] = None, + use_stl: Optional[Union[str, "_models.UseStl"]] = None, + **kwargs + ): + """ + :keyword country_or_region_for_holidays: Country or region for holidays for forecasting tasks. + These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. 
+ :paramtype country_or_region_for_holidays: str
+ :keyword cv_step_size: Number of periods between the origin time of one CV fold and the next
+ fold. For example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be
+ three days apart.
+ :paramtype cv_step_size: int
+ :keyword feature_lags: Flag for generating lags for the numeric features with 'auto' or null.
+ Known values are: "None" and "Auto".
+ :paramtype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags
+ :keyword forecast_horizon: The desired maximum forecast horizon in units of time-series
+ frequency.
+ :paramtype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon
+ :keyword frequency: When forecasting, this parameter represents the period with which the
+ forecast is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset
+ frequency by default.
+ :paramtype frequency: str
+ :keyword seasonality: Set time series seasonality as an integer multiple of the series
+ frequency.
+ If seasonality is set to 'auto', it will be inferred.
+ :paramtype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality
+ :keyword short_series_handling_config: The parameter defining how AutoML should handle short
+ time series. Known values are: "None", "Auto", "Pad", and "Drop".
+ :paramtype short_series_handling_config: str or
+ ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration
+ :keyword target_aggregate_function: The function to be used to aggregate the time series target
+ column to conform to a user-specified frequency.
+ If the TargetAggregateFunction is set (i.e. not 'None') but the freq parameter is not set, an
+ error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean".
+ Known values are: "None", "Sum", "Max", "Min", and "Mean".
+ :paramtype target_aggregate_function: str or
+ ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction
+ :keyword target_lags: The number of past periods to lag from the target column.
+ :paramtype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags
+ :keyword target_rolling_window_size: The number of past periods used to create a rolling window
+ average of the target column.
+ :paramtype target_rolling_window_size:
+ ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize
+ :keyword time_column_name: The name of the time column. This parameter is required when
+ forecasting to specify the datetime column in the input data used for building the time series
+ and inferring its frequency.
+ :paramtype time_column_name: str
+ :keyword time_series_id_column_names: The names of columns used to group a timeseries. It can
+ be used to create multiple series.
+ If grain is not defined, the data set is assumed to be one time-series. This parameter is used
+ with task type forecasting.
+ :paramtype time_series_id_column_names: list[str]
+ :keyword use_stl: Configure STL Decomposition of the time-series target column. Known values
+ are: "None", "Season", and "SeasonTrend".
+ :paramtype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl + """ + super().__init__(**kwargs) + self.country_or_region_for_holidays = country_or_region_for_holidays + self.cv_step_size = cv_step_size + self.feature_lags = feature_lags + self.forecast_horizon = forecast_horizon + self.frequency = frequency + self.seasonality = seasonality + self.short_series_handling_config = short_series_handling_config + self.target_aggregate_function = target_aggregate_function + self.target_lags = target_lags + self.target_rolling_window_size = target_rolling_window_size + self.time_column_name = time_column_name + self.time_series_id_column_names = time_series_id_column_names + self.use_stl = use_stl + + +class ForecastingTrainingSettings(TrainingSettings): + """Forecasting Training related configuration. + + :ivar enable_dnn_training: Enable recommendation of DNN models. + :vartype enable_dnn_training: bool + :ivar enable_model_explainability: Flag to turn on explainability on best model. + :vartype enable_model_explainability: bool + :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :vartype enable_onnx_compatible_models: bool + :ivar enable_stack_ensemble: Enable stack ensemble run. + :vartype enable_stack_ensemble: bool + :ivar enable_vote_ensemble: Enable voting ensemble run. + :vartype enable_vote_ensemble: bool + :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :vartype ensemble_model_download_timeout: ~datetime.timedelta + :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :vartype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar allowed_training_algorithms: Allowed models for forecasting task. + :vartype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + :ivar blocked_training_algorithms: Blocked models for forecasting task. 
+ :vartype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + """ + + _attribute_map = { + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, + } + + def __init__( + self, + *, + enable_dnn_training: bool = False, + enable_model_explainability: bool = True, + enable_onnx_compatible_models: bool = False, + enable_stack_ensemble: bool = True, + enable_vote_ensemble: bool = True, + ensemble_model_download_timeout: datetime.timedelta = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, + **kwargs + ): + """ + :keyword enable_dnn_training: Enable recommendation of DNN models. + :paramtype enable_dnn_training: bool + :keyword enable_model_explainability: Flag to turn on explainability on best model. + :paramtype enable_model_explainability: bool + :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :paramtype enable_onnx_compatible_models: bool + :keyword enable_stack_ensemble: Enable stack ensemble run. + :paramtype enable_stack_ensemble: bool + :keyword enable_vote_ensemble: Enable voting ensemble run. + :paramtype enable_vote_ensemble: bool + :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :paramtype ensemble_model_download_timeout: ~datetime.timedelta + :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :paramtype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword allowed_training_algorithms: Allowed models for forecasting task. + :paramtype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + :keyword blocked_training_algorithms: Blocked models for forecasting task. 
+ :paramtype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + """ + super().__init__( + enable_dnn_training=enable_dnn_training, + enable_model_explainability=enable_model_explainability, + enable_onnx_compatible_models=enable_onnx_compatible_models, + enable_stack_ensemble=enable_stack_ensemble, + enable_vote_ensemble=enable_vote_ensemble, + ensemble_model_download_timeout=ensemble_model_download_timeout, + stack_ensemble_settings=stack_ensemble_settings, + **kwargs + ) + self.allowed_training_algorithms = allowed_training_algorithms + self.blocked_training_algorithms = blocked_training_algorithms + + +class FQDNEndpoint(_serialization.Model): + """FQDNEndpoint. + + :ivar domain_name: + :vartype domain_name: str + :ivar endpoint_details: + :vartype endpoint_details: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] + """ + + _attribute_map = { + "domain_name": {"key": "domainName", "type": "str"}, + "endpoint_details": {"key": "endpointDetails", "type": "[FQDNEndpointDetail]"}, + } + + def __init__( + self, + *, + domain_name: Optional[str] = None, + endpoint_details: Optional[List["_models.FQDNEndpointDetail"]] = None, + **kwargs + ): + """ + :keyword domain_name: + :paramtype domain_name: str + :keyword endpoint_details: + :paramtype endpoint_details: + list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] + """ + super().__init__(**kwargs) + self.domain_name = domain_name + self.endpoint_details = endpoint_details + + +class FQDNEndpointDetail(_serialization.Model): + """FQDNEndpointDetail. + + :ivar port: + :vartype port: int + """ + + _attribute_map = { + "port": {"key": "port", "type": "int"}, + } + + def __init__(self, *, port: Optional[int] = None, **kwargs): + """ + :keyword port: + :paramtype port: int + """ + super().__init__(**kwargs) + self.port = port + + +class FQDNEndpoints(_serialization.Model): + """FQDNEndpoints. + + :ivar properties: + :vartype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpointsProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "FQDNEndpointsProperties"}, + } + + def __init__(self, *, properties: Optional["_models.FQDNEndpointsProperties"] = None, **kwargs): + """ + :keyword properties: + :paramtype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpointsProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class FQDNEndpointsProperties(_serialization.Model): + """FQDNEndpointsProperties. + + :ivar category: + :vartype category: str + :ivar endpoints: + :vartype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] + """ + + _attribute_map = { + "category": {"key": "category", "type": "str"}, + "endpoints": {"key": "endpoints", "type": "[FQDNEndpoint]"}, + } + + def __init__( + self, *, category: Optional[str] = None, endpoints: Optional[List["_models.FQDNEndpoint"]] = None, **kwargs + ): + """ + :keyword category: + :paramtype category: str + :keyword endpoints: + :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] + """ + super().__init__(**kwargs) + self.category = category + self.endpoints = endpoints + + +class GridSamplingAlgorithm(SamplingAlgorithm): + """Defines a Sampling Algorithm that exhaustively generates every value combination in the space. + + All required parameters must be populated in order to send to Azure. 
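+
+ A minimal usage sketch (illustrative only, not part of the generated model; it assumes
+ ``GridSamplingAlgorithm`` is imported from ``azure.mgmt.machinelearningservices.models``,
+ the package path referenced throughout this file):
+
+ .. code-block:: python
+
+     # Grid sampling takes no additional parameters; the discriminator is set automatically.
+     algorithm = GridSamplingAlgorithm()
+     assert algorithm.sampling_algorithm_type == "Grid"
+
+ Such an instance would typically be supplied wherever a
+ ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm is expected, for example in a
+ sweep job configuration; the parent models are not shown here.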
+ + :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter + values, along with configuration properties. Required. Known values are: "Grid", "Random", and + "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + "sampling_algorithm_type": {"required": True}, + } + + _attribute_map = { + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.sampling_algorithm_type = "Grid" # type: str + + +class HdfsDatastore(DatastoreProperties): + """HdfsDatastore. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: [Required] Account credentials. Required. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", and "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded + string. Required if "Https" protocol is selected. + :vartype hdfs_server_certificate: str + :ivar name_node_address: [Required] IP Address or DNS HostName. Required. + :vartype name_node_address: str + :ivar protocol: Protocol used to communicate with the storage account (Https/Http). + :vartype protocol: str + """ + + _validation = { + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "name_node_address": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "hdfs_server_certificate": {"key": "hdfsServerCertificate", "type": "str"}, + "name_node_address": {"key": "nameNodeAddress", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + name_node_address: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + hdfs_server_certificate: Optional[str] = None, + protocol: str = "http", + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: [Required] Account credentials. Required. 
+ :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded + string. Required if "Https" protocol is selected. + :paramtype hdfs_server_certificate: str + :keyword name_node_address: [Required] IP Address or DNS HostName. Required. + :paramtype name_node_address: str + :keyword protocol: Protocol used to communicate with the storage account (Https/Http). + :paramtype protocol: str + """ + super().__init__(description=description, properties=properties, tags=tags, credentials=credentials, **kwargs) + self.datastore_type = "Hdfs" # type: str + self.hdfs_server_certificate = hdfs_server_certificate + self.name_node_address = name_node_address + self.protocol = protocol + + +class HDInsightSchema(_serialization.Model): + """HDInsightSchema. + + :ivar properties: HDInsight compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "HDInsightProperties"}, + } + + def __init__(self, *, properties: Optional["_models.HDInsightProperties"] = None, **kwargs): + """ + :keyword properties: HDInsight compute properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class HDInsight(Compute, HDInsightSchema): # pylint: disable=too-many-instance-attributes + """A HDInsight compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: HDInsight compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + """ + + _validation = { + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, + } + + _attribute_map = { + "properties": {"key": "properties", "type": "HDInsightProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + } + + def __init__( + self, + *, + properties: Optional["_models.HDInsightProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs + ): + """ + :keyword properties: HDInsight compute properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + """ + super().__init__(description=description, resource_id=resource_id, properties=properties, **kwargs) + self.properties = properties + self.compute_type = "HDInsight" # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = None + + +class HDInsightProperties(_serialization.Model): + """HDInsight compute properties. + + :ivar ssh_port: Port open for ssh connections on the master node of the cluster. + :vartype ssh_port: int + :ivar address: Public IP address of the master node of the cluster. + :vartype address: str + :ivar administrator_account: Admin credentials for master node of the cluster. + :vartype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + + _attribute_map = { + "ssh_port": {"key": "sshPort", "type": "int"}, + "address": {"key": "address", "type": "str"}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + } + + def __init__( + self, + *, + ssh_port: Optional[int] = None, + address: Optional[str] = None, + administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, + **kwargs + ): + """ + :keyword ssh_port: Port open for ssh connections on the master node of the cluster. + :paramtype ssh_port: int + :keyword address: Public IP address of the master node of the cluster. + :paramtype address: str + :keyword administrator_account: Admin credentials for master node of the cluster. 
+ :paramtype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + super().__init__(**kwargs) + self.ssh_port = ssh_port + self.address = address + self.administrator_account = administrator_account + + +class IdAssetReference(AssetReferenceBase): + """Reference to an asset via its ARM resource ID. + + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". + :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType + :ivar asset_id: [Required] ARM resource ID of the asset. Required. + :vartype asset_id: str + """ + + _validation = { + "reference_type": {"required": True}, + "asset_id": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "reference_type": {"key": "referenceType", "type": "str"}, + "asset_id": {"key": "assetId", "type": "str"}, + } + + def __init__(self, *, asset_id: str, **kwargs): + """ + :keyword asset_id: [Required] ARM resource ID of the asset. Required. + :paramtype asset_id: str + """ + super().__init__(**kwargs) + self.reference_type = "Id" # type: str + self.asset_id = asset_id + + +class IdentityForCmk(_serialization.Model): + """Identity that will be used to access key vault for encryption at rest. + + :ivar user_assigned_identity: The ArmId of the user assigned identity that will be used to + access the customer managed key vault. + :vartype user_assigned_identity: str + """ + + _attribute_map = { + "user_assigned_identity": {"key": "userAssignedIdentity", "type": "str"}, + } + + def __init__(self, *, user_assigned_identity: Optional[str] = None, **kwargs): + """ + :keyword user_assigned_identity: The ArmId of the user assigned identity that will be used to + access the customer managed key vault. + :paramtype user_assigned_identity: str + """ + super().__init__(**kwargs) + self.user_assigned_identity = user_assigned_identity + + +class IdleShutdownSetting(_serialization.Model): + """Stops compute instance after user defined period of inactivity. + + :ivar idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, maximum + is 3 days. + :vartype idle_time_before_shutdown: str + """ + + _attribute_map = { + "idle_time_before_shutdown": {"key": "idleTimeBeforeShutdown", "type": "str"}, + } + + def __init__(self, *, idle_time_before_shutdown: Optional[str] = None, **kwargs): + """ + :keyword idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, + maximum is 3 days. + :paramtype idle_time_before_shutdown: str + """ + super().__init__(**kwargs) + self.idle_time_before_shutdown = idle_time_before_shutdown + + +class Image(_serialization.Model): + """Image. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Type of the image. Possible values are: docker - For docker images. azureml - For + AzureML images. Known values are: "docker" and "azureml". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.ImageType + :ivar reference: Image reference URL. 
+ :vartype reference: str + """ + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "reference": {"key": "reference", "type": "str"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + type: Union[str, "_models.ImageType"] = "docker", + reference: Optional[str] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword type: Type of the image. Possible values are: docker - For docker images. azureml - + For AzureML images. Known values are: "docker" and "azureml". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ImageType + :keyword reference: Image reference URL. + :paramtype reference: str + """ + super().__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.reference = reference + + +class ImageVertical(_serialization.Model): + """Abstract class for AutoML tasks that train image (computer vision) models - + such as Image Classification / Image Classification Multilabel / Image Object Detection / Image Instance Segmentation. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + """ + + _validation = { + "limit_settings": {"required": True}, + } + + _attribute_map = { + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + **kwargs + ): + """ + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. 
+ :paramtype validation_data_size: float + """ + super().__init__(**kwargs) + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + + +class ImageClassificationBase(ImageVertical): + """ImageClassificationBase. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + """ + + _validation = { + "limit_settings": {"required": True}, + } + + _attribute_map = { + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + **kwargs + ): + """ + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. 
+ :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + **kwargs + ) + self.model_settings = model_settings + self.search_space = search_space + + +class ImageClassification(ImageClassificationBase, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Image Classification. Multi-class image classification is used when an image is classified with only a single label + from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' or a 'duck'. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :ivar primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and + "PrecisionScoreWeighted". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. 
+ :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :keyword primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and + "PrecisionScoreWeighted". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = "ImageClassification" # type: str + self.training_data = training_data + self.primary_metric = primary_metric + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + + +class ImageClassificationMultilabel( + ImageClassificationBase, AutoMLVertical +): # pylint: disable=too-many-instance-attributes + """Image Classification Multilabel. Multi-label image classification is used when an image could have one or more labels + from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. 
+ :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :ivar primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted", and "IOU". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + primary_metric: Optional[Union[str, "_models.ClassificationMultilabelPrimaryMetrics"]] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. 
+ Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :keyword primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted", and "IOU". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = "ImageClassificationMultilabel" # type: str + self.training_data = training_data + self.primary_metric = primary_metric + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + + +class ImageObjectDetectionBase(ImageVertical): + """ImageObjectDetectionBase. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. 
+ :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + """ + + _validation = { + "limit_settings": {"required": True}, + } + + _attribute_map = { + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + **kwargs + ): + """ + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + **kwargs + ) + self.model_settings = model_settings + self.search_space = search_space + + +class ImageInstanceSegmentation( + ImageObjectDetectionBase, AutoMLVertical +): # pylint: disable=too-many-instance-attributes + """Image Instance Segmentation. Instance segmentation is used to identify objects in an image at the pixel level, + drawing a polygon around each object in the image. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. 
Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :ivar primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + primary_metric: Optional[Union[str, "_models.InstanceSegmentationPrimaryMetrics"]] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. 
Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :keyword primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = "ImageInstanceSegmentation" # type: str + self.training_data = training_data + self.primary_metric = primary_metric + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + + +class ImageLimitSettings(_serialization.Model): + """Limit settings for the AutoML job. + + :ivar max_concurrent_trials: Maximum number of concurrent AutoML iterations. + :vartype max_concurrent_trials: int + :ivar max_trials: Maximum number of AutoML iterations. + :vartype max_trials: int + :ivar timeout: AutoML job timeout. + :vartype timeout: ~datetime.timedelta + """ + + _attribute_map = { + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + } + + def __init__( + self, *, max_concurrent_trials: int = 1, max_trials: int = 1, timeout: datetime.timedelta = "P7D", **kwargs + ): + """ + :keyword max_concurrent_trials: Maximum number of concurrent AutoML iterations. 
+ :paramtype max_concurrent_trials: int + :keyword max_trials: Maximum number of AutoML iterations. + :paramtype max_trials: int + :keyword timeout: AutoML job timeout. + :paramtype timeout: ~datetime.timedelta + """ + super().__init__(**kwargs) + self.max_concurrent_trials = max_concurrent_trials + self.max_trials = max_trials + self.timeout = timeout + + +class ImageMetadata(_serialization.Model): + """Returns metadata about the operating system image for this compute instance. + + :ivar current_image_version: Specifies the current operating system image version this compute + instance is running on. + :vartype current_image_version: str + :ivar latest_image_version: Specifies the latest available operating system image version. + :vartype latest_image_version: str + :ivar is_latest_os_image_version: Specifies whether this compute instance is running on the + latest operating system image. + :vartype is_latest_os_image_version: bool + """ + + _attribute_map = { + "current_image_version": {"key": "currentImageVersion", "type": "str"}, + "latest_image_version": {"key": "latestImageVersion", "type": "str"}, + "is_latest_os_image_version": {"key": "isLatestOsImageVersion", "type": "bool"}, + } + + def __init__( + self, + *, + current_image_version: Optional[str] = None, + latest_image_version: Optional[str] = None, + is_latest_os_image_version: Optional[bool] = None, + **kwargs + ): + """ + :keyword current_image_version: Specifies the current operating system image version this + compute instance is running on. + :paramtype current_image_version: str + :keyword latest_image_version: Specifies the latest available operating system image version. + :paramtype latest_image_version: str + :keyword is_latest_os_image_version: Specifies whether this compute instance is running on the + latest operating system image. + :paramtype is_latest_os_image_version: bool + """ + super().__init__(**kwargs) + self.current_image_version = current_image_version + self.latest_image_version = latest_image_version + self.is_latest_os_image_version = is_latest_os_image_version + + +class ImageModelDistributionSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Distribution expressions to sweep over values of model settings. + + :code:` + Some examples are: + + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ` + All distributions can be specified as distribution_name(min, max) or choice(val1, val2, ..., valn) + where distribution name can be: uniform, quniform, loguniform, etc + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. 
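# ---------------------------------------------------------------------------
# Illustrative sketch, not generated code: wiring the models above into an
# image instance-segmentation AutoML task. It assumes MLTableJobInput, defined
# elsewhere in this package, accepts a `uri` keyword; the datastore path is a
# placeholder.
import datetime

from azure.mgmt.machinelearningservices.models import (
    ImageInstanceSegmentation,
    ImageLimitSettings,
    MLTableJobInput,
)

task = ImageInstanceSegmentation(
    training_data=MLTableJobInput(uri="azureml://datastores/workspaceblobstore/paths/train-mltable/"),
    # The generated default for `timeout` is the ISO-8601 string "P7D" (seven days);
    # an explicit timedelta matches the declared type hint.
    limit_settings=ImageLimitSettings(max_trials=10, max_concurrent_trials=2, timeout=datetime.timedelta(hours=6)),
    validation_data_size=0.2,  # fraction in (0.0, 1.0); used when no validation_data is supplied
    primary_metric="MeanAveragePrecision",
)
assert task.task_type == "ImageInstanceSegmentation"  # set by the constructor
# ---------------------------------------------------------------------------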
+ :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. 
+ :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + """ + + _attribute_map = { + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + ams_gradient: Optional[str] = None, + augmentations: Optional[str] = None, + beta1: Optional[str] = None, + beta2: Optional[str] = None, + distributed: Optional[str] = None, + early_stopping: Optional[str] = None, + early_stopping_delay: Optional[str] = None, + early_stopping_patience: Optional[str] = None, + enable_onnx_normalization: Optional[str] = None, + evaluation_frequency: Optional[str] = None, + gradient_accumulation_step: Optional[str] = None, + layers_to_freeze: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + momentum: Optional[str] = None, + nesterov: Optional[str] = None, + number_of_epochs: Optional[str] = None, + number_of_workers: Optional[str] = None, + optimizer: Optional[str] = None, + random_seed: Optional[str] = None, + step_lr_gamma: Optional[str] = None, + step_lr_step_size: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_cosine_lr_cycles: Optional[str] = None, + warmup_cosine_lr_warmup_epochs: Optional[str] = None, + weight_decay: Optional[str] = None, + **kwargs + ): + """ + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. 
+ :paramtype ams_gradient: str + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: str + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: str + :keyword distributed: Whether to use distributer training. + :paramtype distributed: str + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: str + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: str + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: str + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: str + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: str + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: str + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: str + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :paramtype learning_rate_scheduler: str + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: str + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: str + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: str + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: str + :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :paramtype optimizer: str + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: str + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: str + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. 
Must be + a positive integer. + :paramtype step_lr_step_size: str + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: str + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: str + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: str + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: str + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: str + """ + super().__init__(**kwargs) + self.ams_gradient = ams_gradient + self.augmentations = augmentations + self.beta1 = beta1 + self.beta2 = beta2 + self.distributed = distributed + self.early_stopping = early_stopping + self.early_stopping_delay = early_stopping_delay + self.early_stopping_patience = early_stopping_patience + self.enable_onnx_normalization = enable_onnx_normalization + self.evaluation_frequency = evaluation_frequency + self.gradient_accumulation_step = gradient_accumulation_step + self.layers_to_freeze = layers_to_freeze + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.momentum = momentum + self.nesterov = nesterov + self.number_of_epochs = number_of_epochs + self.number_of_workers = number_of_workers + self.optimizer = optimizer + self.random_seed = random_seed + self.step_lr_gamma = step_lr_gamma + self.step_lr_step_size = step_lr_step_size + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_cosine_lr_cycles = warmup_cosine_lr_cycles + self.warmup_cosine_lr_warmup_epochs = warmup_cosine_lr_warmup_epochs + self.weight_decay = weight_decay + + +class ImageModelDistributionSettingsClassification( + ImageModelDistributionSettings +): # pylint: disable=too-many-instance-attributes + """Distribution expressions to sweep over values of model settings. + + :code:` + Some examples are: + + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ` + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. 
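# ---------------------------------------------------------------------------
# Illustrative sketch, not generated code: every field on the
# *DistributionSettings models is a plain string holding a sweep expression,
# exactly as the docstring above describes, and the _attribute_map turns the
# snake_case attributes into the camelCase keys the REST API expects when the
# model is serialized.
from azure.mgmt.machinelearningservices.models import ImageModelDistributionSettings

dist = ImageModelDistributionSettings(
    model_name="choice('seresnext', 'resnest50')",
    learning_rate="uniform(0.001, 0.01)",
    layers_to_freeze="choice(0, 2)",
)
print(dist.serialize())
# Roughly: {"modelName": "choice('seresnext', 'resnest50')",
#           "learningRate": "uniform(0.001, 0.01)", "layersToFreeze": "choice(0, 2)"}
# ---------------------------------------------------------------------------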
+ :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. 
+ :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + :ivar training_crop_size: Image crop size that is input to the neural network for the training + dataset. Must be a positive integer. + :vartype training_crop_size: str + :ivar validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :vartype validation_crop_size: str + :ivar validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :vartype validation_resize_size: str + :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. + :vartype weighted_loss: str + """ + + _attribute_map = { + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + "training_crop_size": {"key": "trainingCropSize", "type": "str"}, + "validation_crop_size": {"key": "validationCropSize", "type": "str"}, + "validation_resize_size": {"key": "validationResizeSize", "type": "str"}, + "weighted_loss": {"key": "weightedLoss", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + ams_gradient: Optional[str] = None, + augmentations: Optional[str] = None, + beta1: Optional[str] = None, + beta2: Optional[str] = None, + distributed: Optional[str] = None, + early_stopping: Optional[str] = None, + early_stopping_delay: Optional[str] = None, + early_stopping_patience: Optional[str] = None, + enable_onnx_normalization: Optional[str] = None, + evaluation_frequency: Optional[str] = None, + gradient_accumulation_step: 
Optional[str] = None, + layers_to_freeze: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + momentum: Optional[str] = None, + nesterov: Optional[str] = None, + number_of_epochs: Optional[str] = None, + number_of_workers: Optional[str] = None, + optimizer: Optional[str] = None, + random_seed: Optional[str] = None, + step_lr_gamma: Optional[str] = None, + step_lr_step_size: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_cosine_lr_cycles: Optional[str] = None, + warmup_cosine_lr_warmup_epochs: Optional[str] = None, + weight_decay: Optional[str] = None, + training_crop_size: Optional[str] = None, + validation_crop_size: Optional[str] = None, + validation_resize_size: Optional[str] = None, + weighted_loss: Optional[str] = None, + **kwargs + ): + """ + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: str + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: str + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: str + :keyword distributed: Whether to use distributer training. + :paramtype distributed: str + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: str + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: str + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: str + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: str + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: str + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: str + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: str + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :paramtype learning_rate_scheduler: str + :keyword model_name: Name of the model to use for training. 
+ For more information on the available models please visit the official documentation:
+ https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models.
+ :paramtype model_name: str
+ :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0,
+ 1].
+ :paramtype momentum: str
+ :keyword nesterov: Enable nesterov when optimizer is 'sgd'.
+ :paramtype nesterov: str
+ :keyword number_of_epochs: Number of training epochs. Must be a positive integer.
+ :paramtype number_of_epochs: str
+ :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer.
+ :paramtype number_of_workers: str
+ :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'.
+ :paramtype optimizer: str
+ :keyword random_seed: Random seed to be used when using deterministic training.
+ :paramtype random_seed: str
+ :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float
+ in the range [0, 1].
+ :paramtype step_lr_gamma: str
+ :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be
+ a positive integer.
+ :paramtype step_lr_step_size: str
+ :keyword training_batch_size: Training batch size. Must be a positive integer.
+ :paramtype training_batch_size: str
+ :keyword validation_batch_size: Validation batch size. Must be a positive integer.
+ :paramtype validation_batch_size: str
+ :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is
+ 'warmup_cosine'. Must be a float in the range [0, 1].
+ :paramtype warmup_cosine_lr_cycles: str
+ :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is
+ 'warmup_cosine'. Must be a positive integer.
+ :paramtype warmup_cosine_lr_warmup_epochs: str
+ :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must
+ be a float in the range [0, 1].
+ :paramtype weight_decay: str
+ :keyword training_crop_size: Image crop size that is input to the neural network for the
+ training dataset. Must be a positive integer.
+ :paramtype training_crop_size: str
+ :keyword validation_crop_size: Image crop size that is input to the neural network for the
+ validation dataset. Must be a positive integer.
+ :paramtype validation_crop_size: str
+ :keyword validation_resize_size: Image size to which to resize before cropping for the
+ validation dataset. Must be a positive integer.
+ :paramtype validation_resize_size: str
+ :keyword weighted_loss: Weighted loss. The accepted values are: 0 for no weighted loss,
+ 1 for weighted loss with sqrt(class_weights), and 2 for weighted loss with class_weights. Must be
+ 0, 1, or 2.
+ :paramtype weighted_loss: str + """ + super().__init__( + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) + self.training_crop_size = training_crop_size + self.validation_crop_size = validation_crop_size + self.validation_resize_size = validation_resize_size + self.weighted_loss = weighted_loss + + +class ImageModelDistributionSettingsObjectDetection( + ImageModelDistributionSettings +): # pylint: disable=too-many-instance-attributes + """Distribution expressions to sweep over values of model settings. + + :code:` + Some examples are: + + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ` + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. 
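# ---------------------------------------------------------------------------
# Illustrative sketch, not generated code: classification-specific knobs
# (crop sizes, weighted_loss) are swept the same way as the base settings, and
# a task's `search_space` takes a list of these entries, one per combination
# of models/ranges to explore.
from azure.mgmt.machinelearningservices.models import ImageModelDistributionSettingsClassification

classification_search_space = [
    ImageModelDistributionSettingsClassification(
        model_name="choice('seresnext', 'resnest50')",
        learning_rate="uniform(0.001, 0.01)",
        training_crop_size="choice(224, 256)",
        weighted_loss="choice(0, 1, 2)",  # 0: none, 1: sqrt(class_weights), 2: class_weights
    ),
]
# ---------------------------------------------------------------------------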
+ :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must + be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype box_detections_per_image: str + :ivar box_score_threshold: During inference, only return proposals with a classification score + greater than + BoxScoreThreshold. Must be a float in the range[0, 1]. + :vartype box_score_threshold: str + :ivar image_size: Image size for train and validation. Must be a positive integer. 
+ Note: The training run may get into CUDA OOM if the size is too big.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype image_size: str
+ :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone.
+ Must be a positive integer. Note: the training run may get into CUDA OOM if the size is too big.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype max_size: str
+ :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone.
+ Must be a positive integer. Note: the training run may get into CUDA OOM if the size is too big.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype min_size: str
+ :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'.
+ Note: the training run may get into CUDA OOM if the model size is too big.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype model_size: str
+ :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%.
+ Note: the training run may get into CUDA OOM if there is not enough GPU memory.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype multi_scale: str
+ :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a
+ float in the range [0, 1].
+ :vartype nms_iou_threshold: str
+ :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not
+ be None to enable small object detection logic. A string containing two integers in mxn format.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype tile_grid_size: str
+ :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be a
+ float in the range [0, 1).
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype tile_overlap_ratio: str
+ :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging
+ predictions from tiles and the image.
+ Used in validation/inference. Must be a float in the range [0, 1].
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ NMS: Non-maximum suppression.
+ :vartype tile_predictions_nms_threshold: str
+ :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be a
+ float in the range [0, 1].
+ :vartype validation_iou_threshold: str
+ :ivar validation_metric_type: Metric computation method to use for validation metrics. Must be
+ 'none', 'coco', 'voc', or 'coco_voc'.
+ :vartype validation_metric_type: str + """ + + _attribute_map = { + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "str"}, + "box_score_threshold": {"key": "boxScoreThreshold", "type": "str"}, + "image_size": {"key": "imageSize", "type": "str"}, + "max_size": {"key": "maxSize", "type": "str"}, + "min_size": {"key": "minSize", "type": "str"}, + "model_size": {"key": "modelSize", "type": "str"}, + "multi_scale": {"key": "multiScale", "type": "str"}, + "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "str"}, + "tile_grid_size": {"key": "tileGridSize", "type": "str"}, + "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "str"}, + "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "str"}, + "validation_iou_threshold": {"key": "validationIouThreshold", "type": "str"}, + "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + ams_gradient: Optional[str] = None, + augmentations: Optional[str] = None, + beta1: Optional[str] = None, + beta2: Optional[str] = None, + distributed: Optional[str] = None, + early_stopping: Optional[str] = None, + early_stopping_delay: Optional[str] = None, + early_stopping_patience: Optional[str] = None, + enable_onnx_normalization: Optional[str] = None, + evaluation_frequency: Optional[str] = None, + gradient_accumulation_step: Optional[str] = None, + layers_to_freeze: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + momentum: Optional[str] = None, + nesterov: Optional[str] = None, + number_of_epochs: Optional[str] = None, + number_of_workers: Optional[str] = 
None, + optimizer: Optional[str] = None, + random_seed: Optional[str] = None, + step_lr_gamma: Optional[str] = None, + step_lr_step_size: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_cosine_lr_cycles: Optional[str] = None, + warmup_cosine_lr_warmup_epochs: Optional[str] = None, + weight_decay: Optional[str] = None, + box_detections_per_image: Optional[str] = None, + box_score_threshold: Optional[str] = None, + image_size: Optional[str] = None, + max_size: Optional[str] = None, + min_size: Optional[str] = None, + model_size: Optional[str] = None, + multi_scale: Optional[str] = None, + nms_iou_threshold: Optional[str] = None, + tile_grid_size: Optional[str] = None, + tile_overlap_ratio: Optional[str] = None, + tile_predictions_nms_threshold: Optional[str] = None, + validation_iou_threshold: Optional[str] = None, + validation_metric_type: Optional[str] = None, + **kwargs + ): + """ + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: str + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: str + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: str + :keyword distributed: Whether to use distributer training. + :paramtype distributed: str + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: str + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: str + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: str + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: str + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: str + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: str + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: str + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :paramtype learning_rate_scheduler: str + :keyword model_name: Name of the model to use for training. 
+ For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: str + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: str + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: str + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: str + :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :paramtype optimizer: str + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: str + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: str + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: str + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: str + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: str + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: str + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: str + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: str + :keyword box_detections_per_image: Maximum number of detections per image, for all classes. + Must be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype box_detections_per_image: str + :keyword box_score_threshold: During inference, only return proposals with a classification + score greater than + BoxScoreThreshold. Must be a float in the range[0, 1]. + :paramtype box_score_threshold: str + :keyword image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. + :paramtype image_size: str + :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype max_size: str + :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype min_size: str + :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: training run may get into CUDA OOM if the model size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. 
+ :paramtype model_size: str + :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: training run may get into CUDA OOM if there is not sufficient GPU memory. + Note: This setting is only supported for the 'yolov5' algorithm. + :paramtype multi_scale: str + :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be + a float in the range [0, 1]. + :paramtype nms_iou_threshold: str + :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must + not be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype tile_grid_size: str + :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be + a float in the range [0, 1). + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype tile_overlap_ratio: str + :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/inference. Must be a float in the range [0, 1]. + Note: This setting is not supported for the 'yolov5' algorithm. + NMS: Non-maximum suppression. + :paramtype tile_predictions_nms_threshold: str + :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must + be a float in the range [0, 1]. + :paramtype validation_iou_threshold: str + :keyword validation_metric_type: Metric computation method to use for validation metrics. Must + be 'none', 'coco', 'voc', or 'coco_voc'. + :paramtype validation_metric_type: str + """ + super().__init__( + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) + self.box_detections_per_image = box_detections_per_image + self.box_score_threshold = box_score_threshold + self.image_size = image_size + self.max_size = max_size + self.min_size = min_size + self.model_size = model_size + self.multi_scale = multi_scale + self.nms_iou_threshold = nms_iou_threshold + self.tile_grid_size = tile_grid_size + self.tile_overlap_ratio = tile_overlap_ratio + self.tile_predictions_nms_threshold = tile_predictions_nms_threshold + self.validation_iou_threshold = validation_iou_threshold + self.validation_metric_type = validation_metric_type + + +class ImageModelSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Settings used for training the model.
+ For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. 
Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: float + """ + + _attribute_map = { + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", 
"type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + advanced_settings: Optional[str] = None, + ams_gradient: Optional[bool] = None, + augmentations: Optional[str] = None, + beta1: Optional[float] = None, + beta2: Optional[float] = None, + checkpoint_frequency: Optional[int] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, + checkpoint_run_id: Optional[str] = None, + distributed: Optional[bool] = None, + early_stopping: Optional[bool] = None, + early_stopping_delay: Optional[int] = None, + early_stopping_patience: Optional[int] = None, + enable_onnx_normalization: Optional[bool] = None, + evaluation_frequency: Optional[int] = None, + gradient_accumulation_step: Optional[int] = None, + layers_to_freeze: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, + model_name: Optional[str] = None, + momentum: Optional[float] = None, + nesterov: Optional[bool] = None, + number_of_epochs: Optional[int] = None, + number_of_workers: Optional[int] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, + random_seed: Optional[int] = None, + step_lr_gamma: Optional[float] = None, + step_lr_step_size: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_cosine_lr_cycles: Optional[float] = None, + warmup_cosine_lr_warmup_epochs: Optional[int] = None, + weight_decay: Optional[float] = None, + **kwargs + ): + """ + :keyword advanced_settings: Settings for advanced scenarios. + :paramtype advanced_settings: str + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: bool + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: float + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: float + :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive + integer. + :paramtype checkpoint_frequency: int + :keyword checkpoint_model: The pretrained checkpoint model for incremental training. + :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :paramtype checkpoint_run_id: str + :keyword distributed: Whether to use distributed training. + :paramtype distributed: bool + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: bool + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: int + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. 
+ :paramtype early_stopping_patience: int + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: bool + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: int + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: int + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: int + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: float + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: bool + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: int + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: int + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: int + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: float + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: int + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: int + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: int + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: float + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: int + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. 
Must + be a float in the range[0, 1]. + :paramtype weight_decay: float + """ + super().__init__(**kwargs) + self.advanced_settings = advanced_settings + self.ams_gradient = ams_gradient + self.augmentations = augmentations + self.beta1 = beta1 + self.beta2 = beta2 + self.checkpoint_frequency = checkpoint_frequency + self.checkpoint_model = checkpoint_model + self.checkpoint_run_id = checkpoint_run_id + self.distributed = distributed + self.early_stopping = early_stopping + self.early_stopping_delay = early_stopping_delay + self.early_stopping_patience = early_stopping_patience + self.enable_onnx_normalization = enable_onnx_normalization + self.evaluation_frequency = evaluation_frequency + self.gradient_accumulation_step = gradient_accumulation_step + self.layers_to_freeze = layers_to_freeze + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.momentum = momentum + self.nesterov = nesterov + self.number_of_epochs = number_of_epochs + self.number_of_workers = number_of_workers + self.optimizer = optimizer + self.random_seed = random_seed + self.step_lr_gamma = step_lr_gamma + self.step_lr_step_size = step_lr_step_size + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_cosine_lr_cycles = warmup_cosine_lr_cycles + self.warmup_cosine_lr_warmup_epochs = warmup_cosine_lr_warmup_epochs + self.weight_decay = weight_decay + + +class ImageModelSettingsClassification(ImageModelSettings): # pylint: disable=too-many-instance-attributes + """Settings used for training the model. + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. 
+ :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: float + :ivar training_crop_size: Image crop size that is input to the neural network for the training + dataset. Must be a positive integer. 
+ :vartype training_crop_size: int + :ivar validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :vartype validation_crop_size: int + :ivar validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :vartype validation_resize_size: int + :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. + :vartype weighted_loss: int + """ + + _attribute_map = { + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + "training_crop_size": {"key": "trainingCropSize", "type": "int"}, + "validation_crop_size": {"key": "validationCropSize", "type": "int"}, + "validation_resize_size": {"key": "validationResizeSize", "type": "int"}, + "weighted_loss": {"key": "weightedLoss", "type": "int"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + advanced_settings: Optional[str] = None, + ams_gradient: Optional[bool] = None, + augmentations: Optional[str] = None, + beta1: Optional[float] = None, + beta2: Optional[float] = None, + checkpoint_frequency: Optional[int] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, + checkpoint_run_id: Optional[str] = None, + distributed: Optional[bool] = None, + early_stopping: Optional[bool] = None, + early_stopping_delay: Optional[int] = None, + 
early_stopping_patience: Optional[int] = None, + enable_onnx_normalization: Optional[bool] = None, + evaluation_frequency: Optional[int] = None, + gradient_accumulation_step: Optional[int] = None, + layers_to_freeze: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, + model_name: Optional[str] = None, + momentum: Optional[float] = None, + nesterov: Optional[bool] = None, + number_of_epochs: Optional[int] = None, + number_of_workers: Optional[int] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, + random_seed: Optional[int] = None, + step_lr_gamma: Optional[float] = None, + step_lr_step_size: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_cosine_lr_cycles: Optional[float] = None, + warmup_cosine_lr_warmup_epochs: Optional[int] = None, + weight_decay: Optional[float] = None, + training_crop_size: Optional[int] = None, + validation_crop_size: Optional[int] = None, + validation_resize_size: Optional[int] = None, + weighted_loss: Optional[int] = None, + **kwargs + ): + """ + :keyword advanced_settings: Settings for advanced scenarios. + :paramtype advanced_settings: str + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: bool + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: float + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: float + :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive + integer. + :paramtype checkpoint_frequency: int + :keyword checkpoint_model: The pretrained checkpoint model for incremental training. + :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :paramtype checkpoint_run_id: str + :keyword distributed: Whether to use distributed training. + :paramtype distributed: bool + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: bool + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: int + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: int + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: bool + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: int + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. 
+ :paramtype gradient_accumulation_step: int + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: int + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: float + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: bool + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: int + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: int + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: int + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: float + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: int + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: int + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: int + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: float + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: int + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: float + :keyword training_crop_size: Image crop size that is input to the neural network for the + training dataset. Must be a positive integer. + :paramtype training_crop_size: int + :keyword validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :paramtype validation_crop_size: int + :keyword validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :paramtype validation_resize_size: int + :keyword weighted_loss: Weighted loss. 
The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. + :paramtype weighted_loss: int + """ + super().__init__( + advanced_settings=advanced_settings, + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + checkpoint_frequency=checkpoint_frequency, + checkpoint_model=checkpoint_model, + checkpoint_run_id=checkpoint_run_id, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) + self.training_crop_size = training_crop_size + self.validation_crop_size = validation_crop_size + self.validation_resize_size = validation_resize_size + self.weighted_loss = weighted_loss + + +class ImageModelSettingsObjectDetection(ImageModelSettings): # pylint: disable=too-many-instance-attributes + """Settings used for training the model. + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. 
+ :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. 
+ :vartype weight_decay: float + :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must + be a positive integer. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype box_detections_per_image: int + :ivar box_score_threshold: During inference, only return proposals with a classification score + greater than + BoxScoreThreshold. Must be a float in the range [0, 1]. + :vartype box_score_threshold: float + :ivar image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This setting is only supported for the 'yolov5' algorithm. + :vartype image_size: int + :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype max_size: int + :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype min_size: int + :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: training run may get into CUDA OOM if the model size is too big. + Note: This setting is only supported for the 'yolov5' algorithm. Known values are: "None", + "Small", "Medium", "Large", and "ExtraLarge". + :vartype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize + :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: training run may get into CUDA OOM if there is not sufficient GPU memory. + Note: This setting is only supported for the 'yolov5' algorithm. + :vartype multi_scale: bool + :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a + float in the range [0, 1]. + :vartype nms_iou_threshold: float + :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not + be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype tile_grid_size: str + :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be a float + in the range [0, 1). + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype tile_overlap_ratio: float + :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/inference. Must be a float in the range [0, 1]. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype tile_predictions_nms_threshold: float + :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be + a float in the range [0, 1]. + :vartype validation_iou_threshold: float + :ivar validation_metric_type: Metric computation method to use for validation metrics. Known + values are: "None", "Coco", "Voc", and "CocoVoc".
+ :vartype validation_metric_type: str or + ~azure.mgmt.machinelearningservices.models.ValidationMetricType + """ + + _attribute_map = { + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "int"}, + "box_score_threshold": {"key": "boxScoreThreshold", "type": "float"}, + "image_size": {"key": "imageSize", "type": "int"}, + "max_size": {"key": "maxSize", "type": "int"}, + "min_size": {"key": "minSize", "type": "int"}, + "model_size": {"key": "modelSize", "type": "str"}, + "multi_scale": {"key": "multiScale", "type": "bool"}, + "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "float"}, + "tile_grid_size": {"key": "tileGridSize", "type": "str"}, + "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "float"}, + "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "float"}, + "validation_iou_threshold": {"key": "validationIouThreshold", "type": "float"}, + "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + advanced_settings: Optional[str] = None, + ams_gradient: Optional[bool] = None, + augmentations: Optional[str] = None, + beta1: Optional[float] = None, + beta2: Optional[float] = None, + checkpoint_frequency: Optional[int] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, + checkpoint_run_id: Optional[str] = None, + distributed: Optional[bool] = None, + 
early_stopping: Optional[bool] = None, + early_stopping_delay: Optional[int] = None, + early_stopping_patience: Optional[int] = None, + enable_onnx_normalization: Optional[bool] = None, + evaluation_frequency: Optional[int] = None, + gradient_accumulation_step: Optional[int] = None, + layers_to_freeze: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, + model_name: Optional[str] = None, + momentum: Optional[float] = None, + nesterov: Optional[bool] = None, + number_of_epochs: Optional[int] = None, + number_of_workers: Optional[int] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, + random_seed: Optional[int] = None, + step_lr_gamma: Optional[float] = None, + step_lr_step_size: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_cosine_lr_cycles: Optional[float] = None, + warmup_cosine_lr_warmup_epochs: Optional[int] = None, + weight_decay: Optional[float] = None, + box_detections_per_image: Optional[int] = None, + box_score_threshold: Optional[float] = None, + image_size: Optional[int] = None, + max_size: Optional[int] = None, + min_size: Optional[int] = None, + model_size: Optional[Union[str, "_models.ModelSize"]] = None, + multi_scale: Optional[bool] = None, + nms_iou_threshold: Optional[float] = None, + tile_grid_size: Optional[str] = None, + tile_overlap_ratio: Optional[float] = None, + tile_predictions_nms_threshold: Optional[float] = None, + validation_iou_threshold: Optional[float] = None, + validation_metric_type: Optional[Union[str, "_models.ValidationMetricType"]] = None, + **kwargs + ): + """ + :keyword advanced_settings: Settings for advanced scenarios. + :paramtype advanced_settings: str + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: bool + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: float + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: float + :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive + integer. + :paramtype checkpoint_frequency: int + :keyword checkpoint_model: The pretrained checkpoint model for incremental training. + :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :paramtype checkpoint_run_id: str + :keyword distributed: Whether to use distributed training. + :paramtype distributed: bool + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: bool + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: int + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: int + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. 
+ :paramtype enable_onnx_normalization: bool + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: int + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: int + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: int + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: float + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: bool + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: int + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: int + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: int + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: float + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: int + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: int + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: int + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: float + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: int + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. 
+ :paramtype weight_decay: float + :keyword box_detections_per_image: Maximum number of detections per image, for all classes. + Must be a positive integer. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype box_detections_per_image: int + :keyword box_score_threshold: During inference, only return proposals with a classification + score greater than + BoxScoreThreshold. Must be a float in the range [0, 1]. + :paramtype box_score_threshold: float + :keyword image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This setting is only supported for the 'yolov5' algorithm. + :paramtype image_size: int + :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype max_size: int + :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype min_size: int + :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: training run may get into CUDA OOM if the model size is too big. + Note: This setting is only supported for the 'yolov5' algorithm. Known values are: "None", + "Small", "Medium", "Large", and "ExtraLarge". + :paramtype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize + :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: training run may get into CUDA OOM if there is not sufficient GPU memory. + Note: This setting is only supported for the 'yolov5' algorithm. + :paramtype multi_scale: bool + :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be + a float in the range [0, 1]. + :paramtype nms_iou_threshold: float + :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must + not be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype tile_grid_size: str + :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be + a float in the range [0, 1). + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype tile_overlap_ratio: float + :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/inference. Must be a float in the range [0, 1]. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype tile_predictions_nms_threshold: float + :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must + be a float in the range [0, 1]. + :paramtype validation_iou_threshold: float + :keyword validation_metric_type: Metric computation method to use for validation metrics. Known + values are: "None", "Coco", "Voc", and "CocoVoc".
+ :paramtype validation_metric_type: str or + ~azure.mgmt.machinelearningservices.models.ValidationMetricType + """ + super().__init__( + advanced_settings=advanced_settings, + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + checkpoint_frequency=checkpoint_frequency, + checkpoint_model=checkpoint_model, + checkpoint_run_id=checkpoint_run_id, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) + self.box_detections_per_image = box_detections_per_image + self.box_score_threshold = box_score_threshold + self.image_size = image_size + self.max_size = max_size + self.min_size = min_size + self.model_size = model_size + self.multi_scale = multi_scale + self.nms_iou_threshold = nms_iou_threshold + self.tile_grid_size = tile_grid_size + self.tile_overlap_ratio = tile_overlap_ratio + self.tile_predictions_nms_threshold = tile_predictions_nms_threshold + self.validation_iou_threshold = validation_iou_threshold + self.validation_metric_type = validation_metric_type + + +class ImageObjectDetection(ImageObjectDetectionBase, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Image Object Detection. Object detection is used to identify objects in an image and locate each object with a + bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. 
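+
+ As a minimal illustrative sketch (assuming the models are imported from
+ ``azure.mgmt.machinelearningservices.models`` and that the hyperparameter values shown are
+ placeholders), the per-model settings documented earlier in this file are grouped into an
+ ImageModelSettingsObjectDetection instance, which is what ``model_settings`` on this task
+ expects::
+
+     from azure.mgmt.machinelearningservices import models
+
+     # Placeholder hyperparameters; every keyword used here is documented above.
+     detection_settings = models.ImageModelSettingsObjectDetection(
+         learning_rate=0.01,
+         learning_rate_scheduler="WarmupCosine",
+         optimizer="Sgd",
+         momentum=0.9,
+         model_size="Small",
+         nms_iou_threshold=0.5,
+     )
+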
+ :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :ivar primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + primary_metric: Optional[Union[str, "_models.ObjectDetectionPrimaryMetrics"]] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. 
+ :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :keyword primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = "ImageObjectDetection" # type: str + self.training_data = training_data + self.primary_metric = primary_metric + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + + +class ImageSweepSettings(_serialization.Model): + """Model sweeping and hyperparameter sweeping related settings. + + All required parameters must be populated in order to send to Azure. + + :ivar early_termination: Type of early termination policy. + :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :ivar sampling_algorithm: [Required] Type of the hyperparameter sampling algorithms. Required. + Known values are: "Grid", "Random", and "Bayesian". + :vartype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + "sampling_algorithm": {"required": True}, + } + + _attribute_map = { + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, + } + + def __init__( + self, + *, + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + **kwargs + ): + """ + :keyword early_termination: Type of early termination policy. + :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :keyword sampling_algorithm: [Required] Type of the hyperparameter sampling algorithms. + Required. Known values are: "Grid", "Random", and "Bayesian". 
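+
+ A minimal sketch, assuming the models are imported from
+ ``azure.mgmt.machinelearningservices.models``; only the sampling algorithm is required, and
+ "Random" is one of the documented SamplingAlgorithmType values::
+
+     from azure.mgmt.machinelearningservices import models
+
+     # early_termination is optional and may reference any EarlyTerminationPolicy subclass.
+     sweep_settings = models.ImageSweepSettings(sampling_algorithm="Random")
+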
+ :paramtype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + super().__init__(**kwargs) + self.early_termination = early_termination + self.sampling_algorithm = sampling_algorithm + + +class InferenceContainerProperties(_serialization.Model): + """InferenceContainerProperties. + + :ivar liveness_route: The route to check the liveness of the inference server container. + :vartype liveness_route: ~azure.mgmt.machinelearningservices.models.Route + :ivar readiness_route: The route to check the readiness of the inference server container. + :vartype readiness_route: ~azure.mgmt.machinelearningservices.models.Route + :ivar scoring_route: The port to send the scoring requests to, within the inference server + container. + :vartype scoring_route: ~azure.mgmt.machinelearningservices.models.Route + """ + + _attribute_map = { + "liveness_route": {"key": "livenessRoute", "type": "Route"}, + "readiness_route": {"key": "readinessRoute", "type": "Route"}, + "scoring_route": {"key": "scoringRoute", "type": "Route"}, + } + + def __init__( + self, + *, + liveness_route: Optional["_models.Route"] = None, + readiness_route: Optional["_models.Route"] = None, + scoring_route: Optional["_models.Route"] = None, + **kwargs + ): + """ + :keyword liveness_route: The route to check the liveness of the inference server container. + :paramtype liveness_route: ~azure.mgmt.machinelearningservices.models.Route + :keyword readiness_route: The route to check the readiness of the inference server container. + :paramtype readiness_route: ~azure.mgmt.machinelearningservices.models.Route + :keyword scoring_route: The port to send the scoring requests to, within the inference server + container. + :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route + """ + super().__init__(**kwargs) + self.liveness_route = liveness_route + self.readiness_route = readiness_route + self.scoring_route = scoring_route + + +class InstanceTypeSchema(_serialization.Model): + """Instance type schema. + + :ivar node_selector: Node Selector. + :vartype node_selector: dict[str, str] + :ivar resources: Resource requests/limits for this instance type. + :vartype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources + """ + + _attribute_map = { + "node_selector": {"key": "nodeSelector", "type": "{str}"}, + "resources": {"key": "resources", "type": "InstanceTypeSchemaResources"}, + } + + def __init__( + self, + *, + node_selector: Optional[Dict[str, str]] = None, + resources: Optional["_models.InstanceTypeSchemaResources"] = None, + **kwargs + ): + """ + :keyword node_selector: Node Selector. + :paramtype node_selector: dict[str, str] + :keyword resources: Resource requests/limits for this instance type. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources + """ + super().__init__(**kwargs) + self.node_selector = node_selector + self.resources = resources + + +class InstanceTypeSchemaResources(_serialization.Model): + """Resource requests/limits for this instance type. + + :ivar requests: Resource requests for this instance type. + :vartype requests: dict[str, str] + :ivar limits: Resource limits for this instance type. 
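+
+ A minimal sketch of an instance type definition, assuming Kubernetes-style "cpu" and
+ "memory" resource keys (the key names and values are placeholders, not mandated by this
+ schema)::
+
+     from azure.mgmt.machinelearningservices import models
+
+     instance_type = models.InstanceTypeSchema(
+         node_selector={"agentpool": "cpupool"},  # placeholder node selector
+         resources=models.InstanceTypeSchemaResources(
+             requests={"cpu": "1", "memory": "4Gi"},
+             limits={"cpu": "2", "memory": "8Gi"},
+         ),
+     )
+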
+ :vartype limits: dict[str, str] + """ + + _attribute_map = { + "requests": {"key": "requests", "type": "{str}"}, + "limits": {"key": "limits", "type": "{str}"}, + } + + def __init__(self, *, requests: Optional[Dict[str, str]] = None, limits: Optional[Dict[str, str]] = None, **kwargs): + """ + :keyword requests: Resource requests for this instance type. + :paramtype requests: dict[str, str] + :keyword limits: Resource limits for this instance type. + :paramtype limits: dict[str, str] + """ + super().__init__(**kwargs) + self.requests = requests + self.limits = limits + + +class JobBase(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "JobBaseProperties"}, + } + + def __init__(self, *, properties: "_models.JobBaseProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class JobBaseResourceArmPaginatedResult(_serialization.Model): + """A paginated list of JobBase entities. + + :ivar next_link: The link to the next page of JobBase objects. If null, there are no additional + pages. + :vartype next_link: str + :ivar value: An array of objects of type JobBase. + :vartype value: list[~azure.mgmt.machinelearningservices.models.JobBase] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[JobBase]"}, + } + + def __init__(self, *, next_link: Optional[str] = None, value: Optional[List["_models.JobBase"]] = None, **kwargs): + """ + :keyword next_link: The link to the next page of JobBase objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type JobBase. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.JobBase] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class JobResourceConfiguration(ResourceConfiguration): + """JobResourceConfiguration. + + :ivar instance_count: Optional number of instances or nodes used by the compute target. 
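+
+ A minimal sketch of a job resource configuration, assuming the models are imported from
+ ``azure.mgmt.machinelearningservices.models``; the VM size and Docker argument are
+ placeholders::
+
+     from azure.mgmt.machinelearningservices import models
+
+     resources = models.JobResourceConfiguration(
+         instance_count=2,
+         instance_type="Standard_DS3_v2",  # placeholder VM size
+         shm_size="8g",                    # must match the \d+[bBkKmMgG] pattern
+         docker_args="--ipc=host",         # placeholder extra Docker argument
+     )
+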
+ :vartype instance_count: int
+ :ivar instance_type: Optional type of VM used as supported by the compute target.
+ :vartype instance_type: str
+ :ivar properties: Additional properties bag.
+ :vartype properties: dict[str, JSON]
+ :ivar docker_args: Extra arguments to pass to the Docker run command. This would override any
+ parameters that have already been set by the system, or in this section. This parameter is only
+ supported for Azure ML compute types.
+ :vartype docker_args: str
+ :ivar shm_size: Size of the Docker container's shared memory block. This should be in the
+ format of (number)(unit) where number has to be greater than 0 and the unit can be one of
+ b(bytes), k(kilobytes), m(megabytes), or g(gigabytes).
+ :vartype shm_size: str
+ """
+
+ _validation = {
+ "shm_size": {"pattern": r"\d+[bBkKmMgG]"},
+ }
+
+ _attribute_map = {
+ "instance_count": {"key": "instanceCount", "type": "int"},
+ "instance_type": {"key": "instanceType", "type": "str"},
+ "properties": {"key": "properties", "type": "{object}"},
+ "docker_args": {"key": "dockerArgs", "type": "str"},
+ "shm_size": {"key": "shmSize", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ instance_count: int = 1,
+ instance_type: Optional[str] = None,
+ properties: Optional[Dict[str, JSON]] = None,
+ docker_args: Optional[str] = None,
+ shm_size: str = "2g",
+ **kwargs
+ ):
+ """
+ :keyword instance_count: Optional number of instances or nodes used by the compute target.
+ :paramtype instance_count: int
+ :keyword instance_type: Optional type of VM used as supported by the compute target.
+ :paramtype instance_type: str
+ :keyword properties: Additional properties bag.
+ :paramtype properties: dict[str, JSON]
+ :keyword docker_args: Extra arguments to pass to the Docker run command. This would override
+ any parameters that have already been set by the system, or in this section. This parameter is
+ only supported for Azure ML compute types.
+ :paramtype docker_args: str
+ :keyword shm_size: Size of the Docker container's shared memory block. This should be in the
+ format of (number)(unit) where number has to be greater than 0 and the unit can be one of
+ b(bytes), k(kilobytes), m(megabytes), or g(gigabytes).
+ :paramtype shm_size: str
+ """
+ super().__init__(instance_count=instance_count, instance_type=instance_type, properties=properties, **kwargs)
+ self.docker_args = docker_args
+ self.shm_size = shm_size
+
+
+ class JobScheduleAction(ScheduleActionBase):
+ """JobScheduleAction.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values
+ are: "CreateJob" and "InvokeBatchEndpoint".
+ :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType
+ :ivar job_definition: [Required] Defines Schedule action definition details. Required.
+ :vartype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties
+ """
+
+ _validation = {
+ "action_type": {"required": True},
+ "job_definition": {"required": True},
+ }
+
+ _attribute_map = {
+ "action_type": {"key": "actionType", "type": "str"},
+ "job_definition": {"key": "jobDefinition", "type": "JobBaseProperties"},
+ }
+
+ def __init__(self, *, job_definition: "_models.JobBaseProperties", **kwargs):
+ """
+ :keyword job_definition: [Required] Defines Schedule action definition details. Required.
+ :paramtype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + super().__init__(**kwargs) + self.action_type = "CreateJob" # type: str + self.job_definition = job_definition + + +class JobService(_serialization.Model): + """Job endpoint definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar endpoint: Url for endpoint. + :vartype endpoint: str + :ivar error_message: Any error in the service. + :vartype error_message: str + :ivar job_service_type: Endpoint type. + :vartype job_service_type: str + :ivar nodes: Nodes that user would like to start the service on. + If Nodes is not set or set to null, the service will only be started on leader node. + :vartype nodes: ~azure.mgmt.machinelearningservices.models.Nodes + :ivar port: Port for endpoint set by user. + :vartype port: int + :ivar properties: Additional properties to set on the endpoint. + :vartype properties: dict[str, str] + :ivar status: Status of endpoint. + :vartype status: str + """ + + _validation = { + "error_message": {"readonly": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "endpoint": {"key": "endpoint", "type": "str"}, + "error_message": {"key": "errorMessage", "type": "str"}, + "job_service_type": {"key": "jobServiceType", "type": "str"}, + "nodes": {"key": "nodes", "type": "Nodes"}, + "port": {"key": "port", "type": "int"}, + "properties": {"key": "properties", "type": "{str}"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__( + self, + *, + endpoint: Optional[str] = None, + job_service_type: Optional[str] = None, + nodes: Optional["_models.Nodes"] = None, + port: Optional[int] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword endpoint: Url for endpoint. + :paramtype endpoint: str + :keyword job_service_type: Endpoint type. + :paramtype job_service_type: str + :keyword nodes: Nodes that user would like to start the service on. + If Nodes is not set or set to null, the service will only be started on leader node. + :paramtype nodes: ~azure.mgmt.machinelearningservices.models.Nodes + :keyword port: Port for endpoint set by user. + :paramtype port: int + :keyword properties: Additional properties to set on the endpoint. + :paramtype properties: dict[str, str] + """ + super().__init__(**kwargs) + self.endpoint = endpoint + self.error_message = None + self.job_service_type = job_service_type + self.nodes = nodes + self.port = port + self.properties = properties + self.status = None + + +class KerberosCredentials(_serialization.Model): + """KerberosCredentials. + + All required parameters must be populated in order to send to Azure. + + :ivar kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :vartype kerberos_kdc_address: str + :ivar kerberos_principal: [Required] Kerberos Username. Required. + :vartype kerberos_principal: str + :ivar kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. 
+ :vartype kerberos_realm: str + """ + + _validation = { + "kerberos_kdc_address": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + } + + def __init__(self, *, kerberos_kdc_address: str, kerberos_principal: str, kerberos_realm: str, **kwargs): + """ + :keyword kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :paramtype kerberos_kdc_address: str + :keyword kerberos_principal: [Required] Kerberos Username. Required. + :paramtype kerberos_principal: str + :keyword kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :paramtype kerberos_realm: str + """ + super().__init__(**kwargs) + self.kerberos_kdc_address = kerberos_kdc_address + self.kerberos_principal = kerberos_principal + self.kerberos_realm = kerberos_realm + + +class KerberosKeytabCredentials(KerberosCredentials, DatastoreCredentials): + """KerberosKeytabCredentials. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :vartype kerberos_kdc_address: str + :ivar kerberos_principal: [Required] Kerberos Username. Required. + :vartype kerberos_principal: str + :ivar kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :vartype kerberos_realm: str + :ivar secrets: [Required] Keytab secrets. Required. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets + """ + + _validation = { + "credentials_type": {"required": True}, + "kerberos_kdc_address": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "secrets": {"required": True}, + } + + _attribute_map = { + "credentials_type": {"key": "credentialsType", "type": "str"}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + "secrets": {"key": "secrets", "type": "KerberosKeytabSecrets"}, + } + + def __init__( + self, + *, + kerberos_kdc_address: str, + kerberos_principal: str, + kerberos_realm: str, + secrets: "_models.KerberosKeytabSecrets", + **kwargs + ): + """ + :keyword kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :paramtype kerberos_kdc_address: str + :keyword kerberos_principal: [Required] Kerberos Username. Required. + :paramtype kerberos_principal: str + :keyword kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. 
+ :paramtype kerberos_realm: str + :keyword secrets: [Required] Keytab secrets. Required. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets + """ + super().__init__( + kerberos_kdc_address=kerberos_kdc_address, + kerberos_principal=kerberos_principal, + kerberos_realm=kerberos_realm, + **kwargs + ) + self.credentials_type = "KerberosKeytab" # type: str + self.secrets = secrets + self.kerberos_kdc_address = kerberos_kdc_address + self.kerberos_principal = kerberos_principal + self.kerberos_realm = kerberos_realm + + +class KerberosKeytabSecrets(DatastoreSecrets): + """KerberosKeytabSecrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar kerberos_keytab: Kerberos keytab secret. + :vartype kerberos_keytab: str + """ + + _validation = { + "secrets_type": {"required": True}, + } + + _attribute_map = { + "secrets_type": {"key": "secretsType", "type": "str"}, + "kerberos_keytab": {"key": "kerberosKeytab", "type": "str"}, + } + + def __init__(self, *, kerberos_keytab: Optional[str] = None, **kwargs): + """ + :keyword kerberos_keytab: Kerberos keytab secret. + :paramtype kerberos_keytab: str + """ + super().__init__(**kwargs) + self.secrets_type = "KerberosKeytab" # type: str + self.kerberos_keytab = kerberos_keytab + + +class KerberosPasswordCredentials(KerberosCredentials, DatastoreCredentials): + """KerberosPasswordCredentials. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :vartype kerberos_kdc_address: str + :ivar kerberos_principal: [Required] Kerberos Username. Required. + :vartype kerberos_principal: str + :ivar kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :vartype kerberos_realm: str + :ivar secrets: [Required] Kerberos password secrets. Required. 
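+
+ A minimal sketch pairing the credentials with their secrets, assuming the models are
+ imported from ``azure.mgmt.machinelearningservices.models``; the account values are
+ placeholders::
+
+     from azure.mgmt.machinelearningservices import models
+
+     credentials = models.KerberosPasswordCredentials(
+         kerberos_kdc_address="kdc.contoso.com",  # placeholder KDC host
+         kerberos_principal="svc_ml",             # placeholder principal
+         kerberos_realm="CONTOSO.COM",            # placeholder realm
+         secrets=models.KerberosPasswordSecrets(kerberos_password="<password>"),
+     )
+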
+ :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets + """ + + _validation = { + "credentials_type": {"required": True}, + "kerberos_kdc_address": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "secrets": {"required": True}, + } + + _attribute_map = { + "credentials_type": {"key": "credentialsType", "type": "str"}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + "secrets": {"key": "secrets", "type": "KerberosPasswordSecrets"}, + } + + def __init__( + self, + *, + kerberos_kdc_address: str, + kerberos_principal: str, + kerberos_realm: str, + secrets: "_models.KerberosPasswordSecrets", + **kwargs + ): + """ + :keyword kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :paramtype kerberos_kdc_address: str + :keyword kerberos_principal: [Required] Kerberos Username. Required. + :paramtype kerberos_principal: str + :keyword kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :paramtype kerberos_realm: str + :keyword secrets: [Required] Kerberos password secrets. Required. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets + """ + super().__init__( + kerberos_kdc_address=kerberos_kdc_address, + kerberos_principal=kerberos_principal, + kerberos_realm=kerberos_realm, + **kwargs + ) + self.credentials_type = "KerberosPassword" # type: str + self.secrets = secrets + self.kerberos_kdc_address = kerberos_kdc_address + self.kerberos_principal = kerberos_principal + self.kerberos_realm = kerberos_realm + + +class KerberosPasswordSecrets(DatastoreSecrets): + """KerberosPasswordSecrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar kerberos_password: Kerberos password secret. + :vartype kerberos_password: str + """ + + _validation = { + "secrets_type": {"required": True}, + } + + _attribute_map = { + "secrets_type": {"key": "secretsType", "type": "str"}, + "kerberos_password": {"key": "kerberosPassword", "type": "str"}, + } + + def __init__(self, *, kerberos_password: Optional[str] = None, **kwargs): + """ + :keyword kerberos_password: Kerberos password secret. + :paramtype kerberos_password: str + """ + super().__init__(**kwargs) + self.secrets_type = "KerberosPassword" # type: str + self.kerberos_password = kerberos_password + + +class KubernetesSchema(_serialization.Model): + """Kubernetes Compute Schema. + + :ivar properties: Properties of Kubernetes. + :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "KubernetesProperties"}, + } + + def __init__(self, *, properties: Optional["_models.KubernetesProperties"] = None, **kwargs): + """ + :keyword properties: Properties of Kubernetes. 
+ :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class Kubernetes(Compute, KubernetesSchema): # pylint: disable=too-many-instance-attributes + """A Machine Learning compute based on Kubernetes Compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: Properties of Kubernetes. + :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + """ + + _validation = { + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, + } + + _attribute_map = { + "properties": {"key": "properties", "type": "KubernetesProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + } + + def __init__( + self, + *, + properties: Optional["_models.KubernetesProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs + ): + """ + :keyword properties: Properties of Kubernetes. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + """ + super().__init__(description=description, resource_id=resource_id, properties=properties, **kwargs) + self.properties = properties + self.compute_type = "Kubernetes" # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = None + + +class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): # pylint: disable=too-many-instance-attributes + """OnlineDeploymentProperties. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + KubernetesOnlineDeployment, ManagedOnlineDeployment + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar app_insights_enabled: If true, enables Application Insights logging. + :vartype app_insights_enabled: bool + :ivar egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. 
Known values are: "Enabled" and + "Disabled". + :vartype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known + values are: "Managed", "Kubernetes", and "AzureMLCompute". + :vartype endpoint_compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :ivar instance_type: Compute instance type. + :vartype instance_type: str + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar model: The URI path to the model. + :vartype model: str + :ivar model_mount_path: The path to mount the model in custom container. + :vartype model_mount_path: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar request_settings: Request settings for the deployment. + :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :ivar scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + + _validation = { + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, + } + + _subtype_map = { + "endpoint_compute_type": {"Kubernetes": "KubernetesOnlineDeployment", "Managed": "ManagedOnlineDeployment"} + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + app_insights_enabled: 
bool = False, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, + instance_type: Optional[str] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + model: Optional[str] = None, + model_mount_path: Optional[str] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword app_insights_enabled: If true, enables Application Insights logging. + :paramtype app_insights_enabled: bool + :keyword egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". + :paramtype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :keyword instance_type: Compute instance type. + :paramtype instance_type: str + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword model: The URI path to the model. + :paramtype model: str + :keyword model_mount_path: The path to mount the model in custom container. + :paramtype model_mount_path: str + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword request_settings: Request settings for the deployment. + :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :keyword scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. 
+ :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + **kwargs + ) + self.app_insights_enabled = app_insights_enabled + self.egress_public_network_access = egress_public_network_access + self.endpoint_compute_type = None # type: Optional[str] + self.instance_type = instance_type + self.liveness_probe = liveness_probe + self.model = model + self.model_mount_path = model_mount_path + self.provisioning_state = None + self.readiness_probe = readiness_probe + self.request_settings = request_settings + self.scale_settings = scale_settings + + +class KubernetesOnlineDeployment(OnlineDeploymentProperties): # pylint: disable=too-many-instance-attributes + """Properties specific to a KubernetesOnlineDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar app_insights_enabled: If true, enables Application Insights logging. + :vartype app_insights_enabled: bool + :ivar egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". + :vartype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known + values are: "Managed", "Kubernetes", and "AzureMLCompute". + :vartype endpoint_compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :ivar instance_type: Compute instance type. + :vartype instance_type: str + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar model: The URI path to the model. + :vartype model: str + :ivar model_mount_path: The path to mount the model in custom container. + :vartype model_mount_path: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar request_settings: Request settings for the deployment. 
+ :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :ivar scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :ivar container_resource_requirements: The resource requirements for the container (cpu and + memory). + :vartype container_resource_requirements: + ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + """ + + _validation = { + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, + "container_resource_requirements": { + "key": "containerResourceRequirements", + "type": "ContainerResourceRequirements", + }, + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + app_insights_enabled: bool = False, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, + instance_type: Optional[str] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + model: Optional[str] = None, + model_mount_path: Optional[str] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + container_resource_requirements: Optional["_models.ContainerResourceRequirements"] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. 
Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword app_insights_enabled: If true, enables Application Insights logging. + :paramtype app_insights_enabled: bool + :keyword egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". + :paramtype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :keyword instance_type: Compute instance type. + :paramtype instance_type: str + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword model: The URI path to the model. + :paramtype model: str + :keyword model_mount_path: The path to mount the model in custom container. + :paramtype model_mount_path: str + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword request_settings: Request settings for the deployment. + :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :keyword scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :keyword container_resource_requirements: The resource requirements for the container (cpu and + memory). + :paramtype container_resource_requirements: + ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + """ + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + app_insights_enabled=app_insights_enabled, + egress_public_network_access=egress_public_network_access, + instance_type=instance_type, + liveness_probe=liveness_probe, + model=model, + model_mount_path=model_mount_path, + readiness_probe=readiness_probe, + request_settings=request_settings, + scale_settings=scale_settings, + **kwargs + ) + self.endpoint_compute_type = "Kubernetes" # type: str + self.container_resource_requirements = container_resource_requirements + + +class KubernetesProperties(_serialization.Model): + """Kubernetes properties. + + :ivar relay_connection_string: Relay connection string. + :vartype relay_connection_string: str + :ivar service_bus_connection_string: ServiceBus connection string. + :vartype service_bus_connection_string: str + :ivar extension_principal_id: Extension principal-id. + :vartype extension_principal_id: str + :ivar extension_instance_release_train: Extension instance release train. + :vartype extension_instance_release_train: str + :ivar vc_name: VC name. + :vartype vc_name: str + :ivar namespace: Compute namespace. + :vartype namespace: str + :ivar default_instance_type: Default instance type. + :vartype default_instance_type: str + :ivar instance_types: Instance Type Schema. 
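+
+ A minimal sketch tying the instance type schema into the Kubernetes compute properties,
+ assuming the models are imported from ``azure.mgmt.machinelearningservices.models``; the
+ instance type name is a placeholder::
+
+     from azure.mgmt.machinelearningservices import models
+
+     k8s_properties = models.KubernetesProperties(
+         namespace="azureml",
+         default_instance_type="cpu-small",
+         instance_types={
+             "cpu-small": models.InstanceTypeSchema(
+                 resources=models.InstanceTypeSchemaResources(requests={"cpu": "1"}),
+             ),
+         },
+     )
+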
+ :vartype instance_types: dict[str, + ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] + """ + + _attribute_map = { + "relay_connection_string": {"key": "relayConnectionString", "type": "str"}, + "service_bus_connection_string": {"key": "serviceBusConnectionString", "type": "str"}, + "extension_principal_id": {"key": "extensionPrincipalId", "type": "str"}, + "extension_instance_release_train": {"key": "extensionInstanceReleaseTrain", "type": "str"}, + "vc_name": {"key": "vcName", "type": "str"}, + "namespace": {"key": "namespace", "type": "str"}, + "default_instance_type": {"key": "defaultInstanceType", "type": "str"}, + "instance_types": {"key": "instanceTypes", "type": "{InstanceTypeSchema}"}, + } + + def __init__( + self, + *, + relay_connection_string: Optional[str] = None, + service_bus_connection_string: Optional[str] = None, + extension_principal_id: Optional[str] = None, + extension_instance_release_train: Optional[str] = None, + vc_name: Optional[str] = None, + namespace: str = "default", + default_instance_type: Optional[str] = None, + instance_types: Optional[Dict[str, "_models.InstanceTypeSchema"]] = None, + **kwargs + ): + """ + :keyword relay_connection_string: Relay connection string. + :paramtype relay_connection_string: str + :keyword service_bus_connection_string: ServiceBus connection string. + :paramtype service_bus_connection_string: str + :keyword extension_principal_id: Extension principal-id. + :paramtype extension_principal_id: str + :keyword extension_instance_release_train: Extension instance release train. + :paramtype extension_instance_release_train: str + :keyword vc_name: VC name. + :paramtype vc_name: str + :keyword namespace: Compute namespace. + :paramtype namespace: str + :keyword default_instance_type: Default instance type. + :paramtype default_instance_type: str + :keyword instance_types: Instance Type Schema. + :paramtype instance_types: dict[str, + ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] + """ + super().__init__(**kwargs) + self.relay_connection_string = relay_connection_string + self.service_bus_connection_string = service_bus_connection_string + self.extension_principal_id = extension_principal_id + self.extension_instance_release_train = extension_instance_release_train + self.vc_name = vc_name + self.namespace = namespace + self.default_instance_type = default_instance_type + self.instance_types = instance_types + + +class LabelCategory(_serialization.Model): + """Label category definition. + + :ivar classes: Dictionary of label classes in this category. + :vartype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + :ivar display_name: Display name of the label category. + :vartype display_name: str + :ivar multi_select_enabled: Indicates whether it is allowed to select multiple classes in this + category. + :vartype multi_select_enabled: bool + """ + + _attribute_map = { + "classes": {"key": "classes", "type": "{LabelClass}"}, + "display_name": {"key": "displayName", "type": "str"}, + "multi_select_enabled": {"key": "multiSelectEnabled", "type": "bool"}, + } + + def __init__( + self, + *, + classes: Optional[Dict[str, "_models.LabelClass"]] = None, + display_name: Optional[str] = None, + multi_select_enabled: bool = False, + **kwargs + ): + """ + :keyword classes: Dictionary of label classes in this category. + :paramtype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + :keyword display_name: Display name of the label category. 
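+
+ A minimal sketch of a two-class label taxonomy, assuming the models are imported from
+ ``azure.mgmt.machinelearningservices.models``; the category and class names are
+ placeholders::
+
+     from azure.mgmt.machinelearningservices import models
+
+     category = models.LabelCategory(
+         display_name="Animal",
+         multi_select_enabled=False,
+         classes={
+             "cat": models.LabelClass(display_name="Cat"),
+             "dog": models.LabelClass(display_name="Dog"),
+         },
+     )
+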
+ :paramtype display_name: str + :keyword multi_select_enabled: Indicates whether it is allowed to select multiple classes in + this category. + :paramtype multi_select_enabled: bool + """ + super().__init__(**kwargs) + self.classes = classes + self.display_name = display_name + self.multi_select_enabled = multi_select_enabled + + +class LabelClass(_serialization.Model): + """Label class definition. + + :ivar display_name: Display name of the label class. + :vartype display_name: str + :ivar subclasses: Dictionary of subclasses of the label class. + :vartype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + """ + + _attribute_map = { + "display_name": {"key": "displayName", "type": "str"}, + "subclasses": {"key": "subclasses", "type": "{LabelClass}"}, + } + + def __init__( + self, + *, + display_name: Optional[str] = None, + subclasses: Optional[Dict[str, "_models.LabelClass"]] = None, + **kwargs + ): + """ + :keyword display_name: Display name of the label class. + :paramtype display_name: str + :keyword subclasses: Dictionary of subclasses of the label class. + :paramtype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + """ + super().__init__(**kwargs) + self.display_name = display_name + self.subclasses = subclasses + + +class LabelingDataConfiguration(_serialization.Model): + """Labeling data configuration definition. + + :ivar data_id: Resource Id of the data asset to perform labeling. + :vartype data_id: str + :ivar incremental_data_refresh_enabled: Indicates whether to enable incremental data refresh. + :vartype incremental_data_refresh_enabled: bool + """ + + _attribute_map = { + "data_id": {"key": "dataId", "type": "str"}, + "incremental_data_refresh_enabled": {"key": "incrementalDataRefreshEnabled", "type": "bool"}, + } + + def __init__(self, *, data_id: Optional[str] = None, incremental_data_refresh_enabled: bool = False, **kwargs): + """ + :keyword data_id: Resource Id of the data asset to perform labeling. + :paramtype data_id: str + :keyword incremental_data_refresh_enabled: Indicates whether to enable incremental data + refresh. + :paramtype incremental_data_refresh_enabled: bool + """ + super().__init__(**kwargs) + self.data_id = data_id + self.incremental_data_refresh_enabled = incremental_data_refresh_enabled + + +class LabelingJob(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. 
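The three label models above (LabelClass, LabelCategory, LabelingDataConfiguration) nest into each other; a sketch of how they might be assembled, with placeholder names and a placeholder data asset ID, under the same assumed import as in the earlier sketch:

    from azure.mgmt.machinelearningservices import models

    # A category maps class keys to LabelClass objects, which can nest subclasses.
    animal_category = models.LabelCategory(
        display_name="Animal",
        multi_select_enabled=False,
        classes={
            "cat": models.LabelClass(display_name="Cat"),
            "dog": models.LabelClass(
                display_name="Dog",
                subclasses={"puppy": models.LabelClass(display_name="Puppy")},
            ),
        },
    )

    # The data to label is referenced by the resource ID of a data asset (placeholder).
    data_cfg = models.LabelingDataConfiguration(
        data_id="<data asset resource ID>",
        incremental_data_refresh_enabled=True,
    )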
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "LabelingJobProperties"}, + } + + def __init__(self, *, properties: "_models.LabelingJobProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class LabelingJobMediaProperties(_serialization.Model): + """Properties of a labeling job. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + LabelingJobImageProperties, LabelingJobTextProperties + + All required parameters must be populated in order to send to Azure. + + :ivar media_type: [Required] Media type of the job. Required. Known values are: "Image" and + "Text". + :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType + """ + + _validation = { + "media_type": {"required": True}, + } + + _attribute_map = { + "media_type": {"key": "mediaType", "type": "str"}, + } + + _subtype_map = {"media_type": {"Image": "LabelingJobImageProperties", "Text": "LabelingJobTextProperties"}} + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.media_type = None # type: Optional[str] + + +class LabelingJobImageProperties(LabelingJobMediaProperties): + """Properties of a labeling job for image data. + + All required parameters must be populated in order to send to Azure. + + :ivar media_type: [Required] Media type of the job. Required. Known values are: "Image" and + "Text". + :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType + :ivar annotation_type: Annotation type of image labeling job. Known values are: + "Classification", "BoundingBox", and "InstanceSegmentation". + :vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.ImageAnnotationType + """ + + _validation = { + "media_type": {"required": True}, + } + + _attribute_map = { + "media_type": {"key": "mediaType", "type": "str"}, + "annotation_type": {"key": "annotationType", "type": "str"}, + } + + def __init__(self, *, annotation_type: Optional[Union[str, "_models.ImageAnnotationType"]] = None, **kwargs): + """ + :keyword annotation_type: Annotation type of image labeling job. Known values are: + "Classification", "BoundingBox", and "InstanceSegmentation". + :paramtype annotation_type: str or + ~azure.mgmt.machinelearningservices.models.ImageAnnotationType + """ + super().__init__(**kwargs) + self.media_type = "Image" # type: str + self.annotation_type = annotation_type + + +class LabelingJobInstructions(_serialization.Model): + """Instructions for labeling job. + + :ivar uri: The link to a page with detailed labeling instructions for labelers. + :vartype uri: str + """ + + _attribute_map = { + "uri": {"key": "uri", "type": "str"}, + } + + def __init__(self, *, uri: Optional[str] = None, **kwargs): + """ + :keyword uri: The link to a page with detailed labeling instructions for labelers. 
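LabelingJobImageProperties shows the discriminator pattern used throughout this file: the subclass fixes media_type, so a caller only supplies the annotation type. Paired with LabelingJobInstructions it might look like the sketch below (the URL is a placeholder):

    from azure.mgmt.machinelearningservices import models

    media = models.LabelingJobImageProperties(annotation_type="BoundingBox")
    assert media.media_type == "Image"  # fixed by the subclass constructor

    instructions = models.LabelingJobInstructions(
        uri="https://example.com/labeling-guidelines"  # placeholder link for labelers
    )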
+ :paramtype uri: str + """ + super().__init__(**kwargs) + self.uri = uri + + +class LabelingJobProperties(JobBaseProperties): # pylint: disable=too-many-instance-attributes + """Labeling job definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar created_date_time: Created time of the job in UTC timezone. + :vartype created_date_time: ~datetime.datetime + :ivar data_configuration: Configuration of data used in the job. + :vartype data_configuration: + ~azure.mgmt.machinelearningservices.models.LabelingDataConfiguration + :ivar job_instructions: Labeling instructions of the job. + :vartype job_instructions: ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions + :ivar label_categories: Label categories of the job. + :vartype label_categories: dict[str, ~azure.mgmt.machinelearningservices.models.LabelCategory] + :ivar labeling_job_media_properties: Media type specific properties in the job. + :vartype labeling_job_media_properties: + ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties + :ivar ml_assist_configuration: Configuration of MLAssist feature in the job. + :vartype ml_assist_configuration: + ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration + :ivar progress_metrics: Progress metrics of the job. + :vartype progress_metrics: ~azure.mgmt.machinelearningservices.models.ProgressMetrics + :ivar project_id: Internal id of the job(Previously called project). + :vartype project_id: str + :ivar provisioning_state: Specifies the labeling job provisioning state. Known values are: + "Succeeded", "Failed", "Canceled", and "InProgress". 
+ :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :ivar status_messages: Status messages of the job. + :vartype status_messages: list[~azure.mgmt.machinelearningservices.models.StatusMessage] + """ + + _validation = { + "job_type": {"required": True}, + "status": {"readonly": True}, + "created_date_time": {"readonly": True}, + "progress_metrics": {"readonly": True}, + "project_id": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "status_messages": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "created_date_time": {"key": "createdDateTime", "type": "iso-8601"}, + "data_configuration": {"key": "dataConfiguration", "type": "LabelingDataConfiguration"}, + "job_instructions": {"key": "jobInstructions", "type": "LabelingJobInstructions"}, + "label_categories": {"key": "labelCategories", "type": "{LabelCategory}"}, + "labeling_job_media_properties": {"key": "labelingJobMediaProperties", "type": "LabelingJobMediaProperties"}, + "ml_assist_configuration": {"key": "mlAssistConfiguration", "type": "MLAssistConfiguration"}, + "progress_metrics": {"key": "progressMetrics", "type": "ProgressMetrics"}, + "project_id": {"key": "projectId", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "status_messages": {"key": "statusMessages", "type": "[StatusMessage]"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + data_configuration: Optional["_models.LabelingDataConfiguration"] = None, + job_instructions: Optional["_models.LabelingJobInstructions"] = None, + label_categories: Optional[Dict[str, "_models.LabelCategory"]] = None, + labeling_job_media_properties: Optional["_models.LabelingJobMediaProperties"] = None, + ml_assist_configuration: Optional["_models.MLAssistConfiguration"] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. 
If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword data_configuration: Configuration of data used in the job. + :paramtype data_configuration: + ~azure.mgmt.machinelearningservices.models.LabelingDataConfiguration + :keyword job_instructions: Labeling instructions of the job. + :paramtype job_instructions: ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions + :keyword label_categories: Label categories of the job. + :paramtype label_categories: dict[str, + ~azure.mgmt.machinelearningservices.models.LabelCategory] + :keyword labeling_job_media_properties: Media type specific properties in the job. + :paramtype labeling_job_media_properties: + ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties + :keyword ml_assist_configuration: Configuration of MLAssist feature in the job. + :paramtype ml_assist_configuration: + ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + services=services, + **kwargs + ) + self.job_type = "Labeling" # type: str + self.created_date_time = None + self.data_configuration = data_configuration + self.job_instructions = job_instructions + self.label_categories = label_categories + self.labeling_job_media_properties = labeling_job_media_properties + self.ml_assist_configuration = ml_assist_configuration + self.progress_metrics = None + self.project_id = None + self.provisioning_state = None + self.status_messages = None + + +class LabelingJobResourceArmPaginatedResult(_serialization.Model): + """A paginated list of LabelingJob entities. + + :ivar next_link: The link to the next page of LabelingJob objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type LabelingJob. + :vartype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[LabelingJob]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.LabelingJob"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of LabelingJob objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type LabelingJob. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class LabelingJobTextProperties(LabelingJobMediaProperties): + """Properties of a labeling job for text data. + + All required parameters must be populated in order to send to Azure. + + :ivar media_type: [Required] Media type of the job. Required. 
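Pulling the previous sketches together, LabelingJobProperties aggregates the data configuration, instructions, categories, and media properties, and the LabelingJob envelope wraps it for the ARM API; job_type is forced to "Labeling" by the constructor above. A continuation of the earlier sketches (display name is a placeholder):

    labeling_job = models.LabelingJob(
        properties=models.LabelingJobProperties(
            display_name="animal-labeling",       # placeholder
            data_configuration=data_cfg,          # from the earlier sketch
            job_instructions=instructions,        # from the earlier sketch
            label_categories={"animal": animal_category},
            labeling_job_media_properties=media,  # from the earlier sketch
        )
    )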
Known values are: "Image" and + "Text". + :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType + :ivar annotation_type: Annotation type of text labeling job. Known values are: "Classification" + and "NamedEntityRecognition". + :vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.TextAnnotationType + """ + + _validation = { + "media_type": {"required": True}, + } + + _attribute_map = { + "media_type": {"key": "mediaType", "type": "str"}, + "annotation_type": {"key": "annotationType", "type": "str"}, + } + + def __init__(self, *, annotation_type: Optional[Union[str, "_models.TextAnnotationType"]] = None, **kwargs): + """ + :keyword annotation_type: Annotation type of text labeling job. Known values are: + "Classification" and "NamedEntityRecognition". + :paramtype annotation_type: str or + ~azure.mgmt.machinelearningservices.models.TextAnnotationType + """ + super().__init__(**kwargs) + self.media_type = "Text" # type: str + self.annotation_type = annotation_type + + +class ListAmlUserFeatureResult(_serialization.Model): + """The List Aml user feature operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML user facing features. + :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlUserFeature] + :ivar next_link: The URI to fetch the next page of AML user features information. Call + ListNext() with this to fetch the next page of AML user features information. + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[AmlUserFeature]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListNotebookKeysResult(_serialization.Model): + """ListNotebookKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar primary_access_key: + :vartype primary_access_key: str + :ivar secondary_access_key: + :vartype secondary_access_key: str + """ + + _validation = { + "primary_access_key": {"readonly": True}, + "secondary_access_key": {"readonly": True}, + } + + _attribute_map = { + "primary_access_key": {"key": "primaryAccessKey", "type": "str"}, + "secondary_access_key": {"key": "secondaryAccessKey", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.primary_access_key = None + self.secondary_access_key = None + + +class ListStorageAccountKeysResult(_serialization.Model): + """ListStorageAccountKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar user_storage_key: + :vartype user_storage_key: str + """ + + _validation = { + "user_storage_key": {"readonly": True}, + } + + _attribute_map = { + "user_storage_key": {"key": "userStorageKey", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.user_storage_key = None + + +class ListUsagesResult(_serialization.Model): + """The List Usages operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML resource usages. 
+ :vartype value: list[~azure.mgmt.machinelearningservices.models.Usage] + :ivar next_link: The URI to fetch the next page of AML resource usage information. Call + ListNext() with this to fetch the next page of AML resource usage information. + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[Usage]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListWorkspaceKeysResult(_serialization.Model): + """ListWorkspaceKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar user_storage_key: + :vartype user_storage_key: str + :ivar user_storage_resource_id: + :vartype user_storage_resource_id: str + :ivar app_insights_instrumentation_key: + :vartype app_insights_instrumentation_key: str + :ivar container_registry_credentials: + :vartype container_registry_credentials: + ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult + :ivar notebook_access_keys: + :vartype notebook_access_keys: + ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + """ + + _validation = { + "user_storage_key": {"readonly": True}, + "user_storage_resource_id": {"readonly": True}, + "app_insights_instrumentation_key": {"readonly": True}, + "container_registry_credentials": {"readonly": True}, + "notebook_access_keys": {"readonly": True}, + } + + _attribute_map = { + "user_storage_key": {"key": "userStorageKey", "type": "str"}, + "user_storage_resource_id": {"key": "userStorageResourceId", "type": "str"}, + "app_insights_instrumentation_key": {"key": "appInsightsInstrumentationKey", "type": "str"}, + "container_registry_credentials": { + "key": "containerRegistryCredentials", + "type": "RegistryListCredentialsResult", + }, + "notebook_access_keys": {"key": "notebookAccessKeys", "type": "ListNotebookKeysResult"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.user_storage_key = None + self.user_storage_resource_id = None + self.app_insights_instrumentation_key = None + self.container_registry_credentials = None + self.notebook_access_keys = None + + +class ListWorkspaceQuotas(_serialization.Model): + """The List WorkspaceQuotasByVMFamily operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of Workspace Quotas by VM Family. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ResourceQuota] + :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. + Call ListNext() with this to fetch the next page of Workspace Quota information. + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[ResourceQuota]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + +class LiteralJobInput(JobInput): + """Literal input type. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. 
Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar value: [Required] Literal value for the input. Required. + :vartype value: str + """ + + _validation = { + "job_input_type": {"required": True}, + "value": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "value": {"key": "value", "type": "str"}, + } + + def __init__(self, *, value: str, description: Optional[str] = None, **kwargs): + """ + :keyword description: Description for the input. + :paramtype description: str + :keyword value: [Required] Literal value for the input. Required. + :paramtype value: str + """ + super().__init__(description=description, **kwargs) + self.job_input_type = "literal" # type: str + self.value = value + + +class ManagedIdentity(IdentityConfiguration): + """Managed identity configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known + values are: "Managed", "AMLToken", and "UserIdentity". + :vartype identity_type: str or + ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType + :ivar client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not + set this field. + :vartype client_id: str + :ivar object_id: Specifies a user-assigned identity by object ID. For system-assigned, do not + set this field. + :vartype object_id: str + :ivar resource_id: Specifies a user-assigned identity by ARM resource ID. For system-assigned, + do not set this field. + :vartype resource_id: str + """ + + _validation = { + "identity_type": {"required": True}, + } + + _attribute_map = { + "identity_type": {"key": "identityType", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "object_id": {"key": "objectId", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, + } + + def __init__( + self, + *, + client_id: Optional[str] = None, + object_id: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs + ): + """ + :keyword client_id: Specifies a user-assigned identity by client ID. For system-assigned, do + not set this field. + :paramtype client_id: str + :keyword object_id: Specifies a user-assigned identity by object ID. For system-assigned, do + not set this field. + :paramtype object_id: str + :keyword resource_id: Specifies a user-assigned identity by ARM resource ID. For + system-assigned, do not set this field. + :paramtype resource_id: str + """ + super().__init__(**kwargs) + self.identity_type = "Managed" # type: str + self.client_id = client_id + self.object_id = object_id + self.resource_id = resource_id + + +class ManagedIdentityAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """ManagedIdentityAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", and + "AccessKey". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. 
+ :vartype category: str + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. "JSON" + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "value": {"key": "value", "type": "str"}, + "value_format": {"key": "valueFormat", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionManagedIdentity"}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionManagedIdentity"] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. + :paramtype category: str + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. "JSON" + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity + """ + super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = "ManagedIdentity" # type: str + self.credentials = credentials + + +class ManagedOnlineDeployment(OnlineDeploymentProperties): # pylint: disable=too-many-instance-attributes + """Properties specific to a ManagedOnlineDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar app_insights_enabled: If true, enables Application Insights logging. + :vartype app_insights_enabled: bool + :ivar egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". + :vartype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known + values are: "Managed", "Kubernetes", and "AzureMLCompute". 
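Two more discriminated models appear just above: LiteralJobInput pins job_input_type to "literal", and ManagedIdentity pins identity_type to "Managed" while leaving all three ID fields optional (omit them for a system-assigned identity). A brief sketch with a placeholder client ID, continuing the same assumed import:

    from azure.mgmt.machinelearningservices import models

    literal = models.LiteralJobInput(value="42")  # job_input_type becomes "literal"

    system_identity = models.ManagedIdentity()    # system-assigned: no IDs supplied
    user_identity = models.ManagedIdentity(
        client_id="00000000-0000-0000-0000-000000000000"  # placeholder client ID
    )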
+ :vartype endpoint_compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :ivar instance_type: Compute instance type. + :vartype instance_type: str + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar model: The URI path to the model. + :vartype model: str + :ivar model_mount_path: The path to mount the model in custom container. + :vartype model_mount_path: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar request_settings: Request settings for the deployment. + :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :ivar scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + + _validation = { + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + app_insights_enabled: bool = False, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, + instance_type: Optional[str] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + model: Optional[str] = None, + model_mount_path: Optional[str] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: 
Optional["_models.OnlineScaleSettings"] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword app_insights_enabled: If true, enables Application Insights logging. + :paramtype app_insights_enabled: bool + :keyword egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". + :paramtype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :keyword instance_type: Compute instance type. + :paramtype instance_type: str + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword model: The URI path to the model. + :paramtype model: str + :keyword model_mount_path: The path to mount the model in custom container. + :paramtype model_mount_path: str + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword request_settings: Request settings for the deployment. + :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :keyword scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + app_insights_enabled=app_insights_enabled, + egress_public_network_access=egress_public_network_access, + instance_type=instance_type, + liveness_probe=liveness_probe, + model=model, + model_mount_path=model_mount_path, + readiness_probe=readiness_probe, + request_settings=request_settings, + scale_settings=scale_settings, + **kwargs + ) + self.endpoint_compute_type = "Managed" # type: str + + +class ManagedServiceIdentity(_serialization.Model): + """Managed service identity (system assigned and/or user assigned identities). + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar principal_id: The service principal ID of the system assigned identity. This property + will only be provided for a system assigned identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the system assigned identity. 
This property will only be + provided for a system assigned identity. + :vartype tenant_id: str + :ivar type: Type of managed service identity (where both SystemAssigned and UserAssigned types + are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :ivar user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. + :vartype user_assigned_identities: dict[str, + ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] + """ + + _validation = { + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"required": True}, + } + + _attribute_map = { + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"}, + } + + def __init__( + self, + *, + type: Union[str, "_models.ManagedServiceIdentityType"], + user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, + **kwargs + ): + """ + :keyword type: Type of managed service identity (where both SystemAssigned and UserAssigned + types are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :keyword user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. + :paramtype user_assigned_identities: dict[str, + ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] + """ + super().__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + self.user_assigned_identities = user_assigned_identities + + +class MedianStoppingPolicy(EarlyTerminationPolicy): + """Defines an early termination policy based on running averages of the primary metric of all runs. + + All required parameters must be populated in order to send to Azure. + + :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. + :vartype delay_evaluation: int + :ivar evaluation_interval: Interval (number of runs) between policy evaluations. + :vartype evaluation_interval: int + :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: + "Bandit", "MedianStopping", and "TruncationSelection". 
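ManagedServiceIdentity is the resource-level identity wrapper: type is the only required argument, and principal_id / tenant_id are read-only, so they stay unset on the client and are populated by the service. A minimal sketch for the system-assigned case:

    from azure.mgmt.machinelearningservices import models

    msi = models.ManagedServiceIdentity(type="SystemAssigned")
    # For user-assigned identities, also pass user_assigned_identities keyed by ARM resource ID.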
+ :vartype policy_type: str or + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType + """ + + _validation = { + "policy_type": {"required": True}, + } + + _attribute_map = { + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, + } + + def __init__(self, *, delay_evaluation: int = 0, evaluation_interval: int = 0, **kwargs): + """ + :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. + :paramtype delay_evaluation: int + :keyword evaluation_interval: Interval (number of runs) between policy evaluations. + :paramtype evaluation_interval: int + """ + super().__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type = "MedianStopping" # type: str + + +class MLAssistConfiguration(_serialization.Model): + """Labeling MLAssist configuration definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MLAssistConfigurationDisabled, MLAssistConfigurationEnabled + + All required parameters must be populated in order to send to Azure. + + :ivar ml_assist: [Required] Indicates whether MLAssist feature is enabled. Required. Known + values are: "Enabled" and "Disabled". + :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType + """ + + _validation = { + "ml_assist": {"required": True}, + } + + _attribute_map = { + "ml_assist": {"key": "mlAssist", "type": "str"}, + } + + _subtype_map = { + "ml_assist": {"Disabled": "MLAssistConfigurationDisabled", "Enabled": "MLAssistConfigurationEnabled"} + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.ml_assist = None # type: Optional[str] + + +class MLAssistConfigurationDisabled(MLAssistConfiguration): + """Labeling MLAssist configuration definition when MLAssist is disabled. + + All required parameters must be populated in order to send to Azure. + + :ivar ml_assist: [Required] Indicates whether MLAssist feature is enabled. Required. Known + values are: "Enabled" and "Disabled". + :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType + """ + + _validation = { + "ml_assist": {"required": True}, + } + + _attribute_map = { + "ml_assist": {"key": "mlAssist", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.ml_assist = "Disabled" # type: str + + +class MLAssistConfigurationEnabled(MLAssistConfiguration): + """Labeling MLAssist configuration definition when MLAssist is enabled. + + All required parameters must be populated in order to send to Azure. + + :ivar ml_assist: [Required] Indicates whether MLAssist feature is enabled. Required. Known + values are: "Enabled" and "Disabled". + :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType + :ivar inferencing_compute_binding: [Required] AML compute binding used in inferencing. + Required. + :vartype inferencing_compute_binding: str + :ivar training_compute_binding: [Required] AML compute binding used in training. Required. 
+ :vartype training_compute_binding: str + """ + + _validation = { + "ml_assist": {"required": True}, + "inferencing_compute_binding": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "training_compute_binding": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "ml_assist": {"key": "mlAssist", "type": "str"}, + "inferencing_compute_binding": {"key": "inferencingComputeBinding", "type": "str"}, + "training_compute_binding": {"key": "trainingComputeBinding", "type": "str"}, + } + + def __init__(self, *, inferencing_compute_binding: str, training_compute_binding: str, **kwargs): + """ + :keyword inferencing_compute_binding: [Required] AML compute binding used in inferencing. + Required. + :paramtype inferencing_compute_binding: str + :keyword training_compute_binding: [Required] AML compute binding used in training. Required. + :paramtype training_compute_binding: str + """ + super().__init__(**kwargs) + self.ml_assist = "Enabled" # type: str + self.inferencing_compute_binding = inferencing_compute_binding + self.training_compute_binding = training_compute_binding + + +class MLFlowModelJobInput(AssetJobInput, JobInput): + """MLFlowModelJobInput. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str + """ + + _validation = { + "job_input_type": {"required": True}, + "uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + } + + def __init__( + self, + *, + uri: str, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs + ): + """ + :keyword description: Description for the input. + :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type = "mlflow_model" # type: str + self.mode = mode + self.uri = uri + + +class MLFlowModelJobOutput(AssetJobOutput, JobOutput): + """MLFlowModelJobOutput. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". 
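MLAssistConfigurationEnabled and MLFlowModelJobInput both require pattern-checked strings (the compute bindings and the asset URI, respectively); a sketch with placeholder compute names and a placeholder model URI:

    from azure.mgmt.machinelearningservices import models

    ml_assist = models.MLAssistConfigurationEnabled(
        inferencing_compute_binding="inference-cluster",  # placeholder compute binding
        training_compute_binding="training-cluster",      # placeholder compute binding
    )

    model_input = models.MLFlowModelJobInput(
        uri="<model asset URI>",  # placeholder; required and pattern-validated
        mode="ReadOnlyMount",
    )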
+ :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str + """ + + _validation = { + "job_output_type": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + **kwargs + ): + """ + :keyword description: Description for the output. + :paramtype description: str + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_output_type = "mlflow_model" # type: str + self.mode = mode + self.uri = uri + + +class MLTableData(DataVersionBaseProperties): + """MLTable data definition. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20221001Preview.Assets.DataVersionBase.DataType. + Required. + :vartype data_uri: str + :ivar referenced_uris: Uris referenced in the MLTable definition (required for lineage). + :vartype referenced_uris: list[str] + """ + + _validation = { + "data_type": {"required": True}, + "data_uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "referenced_uris": {"key": "referencedUris", "type": "[str]"}, + } + + def __init__( + self, + *, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: bool = False, + is_archived: bool = False, + referenced_uris: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. 
+ :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_uri: [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20221001Preview.Assets.DataVersionBase.DataType. + Required. + :paramtype data_uri: str + :keyword referenced_uris: Uris referenced in the MLTable definition (required for lineage). + :paramtype referenced_uris: list[str] + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + is_anonymous=is_anonymous, + is_archived=is_archived, + data_uri=data_uri, + **kwargs + ) + self.data_type = "mltable" # type: str + self.referenced_uris = referenced_uris + + +class MLTableJobInput(AssetJobInput, JobInput): + """MLTableJobInput. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str + """ + + _validation = { + "job_input_type": {"required": True}, + "uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + } + + def __init__( + self, + *, + uri: str, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs + ): + """ + :keyword description: Description for the input. + :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type = "mltable" # type: str + self.mode = mode + self.uri = uri + + +class MLTableJobOutput(AssetJobOutput, JobOutput): + """MLTableJobOutput. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". 
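MLTableData and MLTableJobInput differ only in role (a registered data asset versus a job input), since both fix their discriminators to "mltable"; a sketch with placeholder URIs:

    from azure.mgmt.machinelearningservices import models

    table_data = models.MLTableData(
        data_uri="<MLTable folder URI>",            # placeholder; required
        referenced_uris=["<underlying file URI>"],  # placeholder lineage URIs
    )

    table_input = models.MLTableJobInput(uri="<MLTable asset URI>", mode="Direct")  # placeholders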
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str + """ + + _validation = { + "job_output_type": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + **kwargs + ): + """ + :keyword description: Description for the output. + :paramtype description: str + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_output_type = "mltable" # type: str + self.mode = mode + self.uri = uri + + +class ModelContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ModelContainerProperties"}, + } + + def __init__(self, *, properties: "_models.ModelContainerProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ModelContainerProperties(AssetContainer): + """ModelContainerProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. 
+ :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + :ivar provisioning_state: Provisioning state for the model container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + """ + + _validation = { + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: bool = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + self.provisioning_state = None + + +class ModelContainerResourceArmPaginatedResult(_serialization.Model): + """A paginated list of ModelContainer entities. + + :ivar next_link: The link to the next page of ModelContainer objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type ModelContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ModelContainer]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.ModelContainer"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of ModelContainer objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ModelContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ModelVersion(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
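+
+# A sketch of the ModelContainer envelope wrapping ModelContainerProperties, as sent in
+# create-or-update requests; the description and tag values are placeholders.
+from azure.mgmt.machinelearningservices import models
+
+model_container = models.ModelContainer(
+    properties=models.ModelContainerProperties(
+        description="Container for fraud-detection models",  # placeholder text
+        tags={"team": "risk"},
+    )
+)
+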
+ :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ModelVersionProperties"}, + } + + def __init__(self, *, properties: "_models.ModelVersionProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class ModelVersionProperties(AssetBase): + """Model asset version details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar flavors: Mapping of model flavors to their properties. + :vartype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] + :ivar job_name: Name of the training job which produced this model. + :vartype job_name: str + :ivar model_type: The storage format for this entity. Used for NCD. + :vartype model_type: str + :ivar model_uri: The URI path to the model contents. + :vartype model_uri: str + :ivar provisioning_state: Provisioning state for the model version. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
+ :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "flavors": {"key": "flavors", "type": "{FlavorData}"}, + "job_name": {"key": "jobName", "type": "str"}, + "model_type": {"key": "modelType", "type": "str"}, + "model_uri": {"key": "modelUri", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: bool = False, + is_archived: bool = False, + flavors: Optional[Dict[str, "_models.FlavorData"]] = None, + job_name: Optional[str] = None, + model_type: Optional[str] = None, + model_uri: Optional[str] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword flavors: Mapping of model flavors to their properties. + :paramtype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] + :keyword job_name: Name of the training job which produced this model. + :paramtype job_name: str + :keyword model_type: The storage format for this entity. Used for NCD. + :paramtype model_type: str + :keyword model_uri: The URI path to the model contents. + :paramtype model_uri: str + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) + self.flavors = flavors + self.job_name = job_name + self.model_type = model_type + self.model_uri = model_uri + self.provisioning_state = None + + +class ModelVersionResourceArmPaginatedResult(_serialization.Model): + """A paginated list of ModelVersion entities. + + :ivar next_link: The link to the next page of ModelVersion objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type ModelVersion. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ModelVersion]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.ModelVersion"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of ModelVersion objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ModelVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class Mpi(DistributionConfiguration): + """MPI distribution configuration. 
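+
+# A sketch registering a model version through ModelVersionProperties; the model URI and
+# the "mlflow_model" storage-format string are assumptions for illustration.
+from azure.mgmt.machinelearningservices import models
+
+model_version = models.ModelVersion(
+    properties=models.ModelVersionProperties(
+        model_uri="azureml://datastores/workspaceblobstore/paths/models/fraud/",  # hypothetical path
+        model_type="mlflow_model",  # assumed storage-format value
+        description="Fraud-detection model trained on October data",
+    )
+)
+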
+ + All required parameters must be populated in order to send to Azure. + + :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. + Known values are: "PyTorch", "TensorFlow", and "Mpi". + :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType + :ivar process_count_per_instance: Number of processes per MPI node. + :vartype process_count_per_instance: int + """ + + _validation = { + "distribution_type": {"required": True}, + } + + _attribute_map = { + "distribution_type": {"key": "distributionType", "type": "str"}, + "process_count_per_instance": {"key": "processCountPerInstance", "type": "int"}, + } + + def __init__(self, *, process_count_per_instance: Optional[int] = None, **kwargs): + """ + :keyword process_count_per_instance: Number of processes per MPI node. + :paramtype process_count_per_instance: int + """ + super().__init__(**kwargs) + self.distribution_type = "Mpi" # type: str + self.process_count_per_instance = process_count_per_instance + + +class NlpFixedParameters(_serialization.Model): + """Fixed training parameters that won't be swept over during AutoML NLP training. + + :ivar gradient_accumulation_steps: Number of steps to accumulate gradients over before running + a backward pass. + :vartype gradient_accumulation_steps: int + :ivar learning_rate: The learning rate for the training procedure. + :vartype learning_rate: float + :ivar learning_rate_scheduler: The type of learning rate schedule to use during the training + procedure. Known values are: "None", "Linear", "Cosine", "CosineWithRestarts", "Polynomial", + "Constant", and "ConstantWithWarmup". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.NlpLearningRateScheduler + :ivar model_name: The name of the model to train. + :vartype model_name: str + :ivar number_of_epochs: Number of training epochs. + :vartype number_of_epochs: int + :ivar training_batch_size: The batch size for the training procedure. + :vartype training_batch_size: int + :ivar validation_batch_size: The batch size to be used during evaluation. + :vartype validation_batch_size: int + :ivar warmup_ratio: The warmup ratio, used alongside LrSchedulerType. + :vartype warmup_ratio: float + :ivar weight_decay: The weight decay for the training procedure. 
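+
+# A one-line sketch of the MPI distribution configuration for a distributed job.
+from azure.mgmt.machinelearningservices import models
+
+mpi_distribution = models.Mpi(process_count_per_instance=2)  # two MPI processes per node
+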
+ :vartype weight_decay: float + """ + + _attribute_map = { + "gradient_accumulation_steps": {"key": "gradientAccumulationSteps", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_ratio": {"key": "warmupRatio", "type": "float"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + } + + def __init__( + self, + *, + gradient_accumulation_steps: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.NlpLearningRateScheduler"]] = None, + model_name: Optional[str] = None, + number_of_epochs: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_ratio: Optional[float] = None, + weight_decay: Optional[float] = None, + **kwargs + ): + """ + :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before + running a backward pass. + :paramtype gradient_accumulation_steps: int + :keyword learning_rate: The learning rate for the training procedure. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: The type of learning rate schedule to use during the training + procedure. Known values are: "None", "Linear", "Cosine", "CosineWithRestarts", "Polynomial", + "Constant", and "ConstantWithWarmup". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.NlpLearningRateScheduler + :keyword model_name: The name of the model to train. + :paramtype model_name: str + :keyword number_of_epochs: Number of training epochs. + :paramtype number_of_epochs: int + :keyword training_batch_size: The batch size for the training procedure. + :paramtype training_batch_size: int + :keyword validation_batch_size: The batch size to be used during evaluation. + :paramtype validation_batch_size: int + :keyword warmup_ratio: The warmup ratio, used alongside LrSchedulerType. + :paramtype warmup_ratio: float + :keyword weight_decay: The weight decay for the training procedure. + :paramtype weight_decay: float + """ + super().__init__(**kwargs) + self.gradient_accumulation_steps = gradient_accumulation_steps + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.number_of_epochs = number_of_epochs + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_ratio = warmup_ratio + self.weight_decay = weight_decay + + +class NlpParameterSubspace(_serialization.Model): + """Stringified search spaces for each parameter. See below examples. + + :ivar gradient_accumulation_steps: Number of steps to accumulate gradients over before running + a backward pass. + :vartype gradient_accumulation_steps: str + :ivar learning_rate: The learning rate for the training procedure. + :vartype learning_rate: str + :ivar learning_rate_scheduler: The type of learning rate schedule to use during the training + procedure. + :vartype learning_rate_scheduler: str + :ivar model_name: The name of the model to train. + :vartype model_name: str + :ivar number_of_epochs: Number of training epochs. 
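+
+# A sketch of fixed (non-swept) NLP training parameters; the model name and numeric
+# values are hypothetical choices.
+from azure.mgmt.machinelearningservices import models
+
+nlp_fixed = models.NlpFixedParameters(
+    model_name="bert-base-cased",  # hypothetical model identifier
+    learning_rate=5e-5,
+    number_of_epochs=3,
+    training_batch_size=32,
+)
+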
+ :vartype number_of_epochs: str + :ivar training_batch_size: The batch size for the training procedure. + :vartype training_batch_size: str + :ivar validation_batch_size: The batch size to be used during evaluation. + :vartype validation_batch_size: str + :ivar warmup_ratio: The warmup ratio, used alongside LrSchedulerType. + :vartype warmup_ratio: str + :ivar weight_decay: The weight decay for the training procedure. + :vartype weight_decay: str + """ + + _attribute_map = { + "gradient_accumulation_steps": {"key": "gradientAccumulationSteps", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_ratio": {"key": "warmupRatio", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + } + + def __init__( + self, + *, + gradient_accumulation_steps: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + number_of_epochs: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_ratio: Optional[str] = None, + weight_decay: Optional[str] = None, + **kwargs + ): + """ + :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before + running a backward pass. + :paramtype gradient_accumulation_steps: str + :keyword learning_rate: The learning rate for the training procedure. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: The type of learning rate schedule to use during the training + procedure. + :paramtype learning_rate_scheduler: str + :keyword model_name: The name of the model to train. + :paramtype model_name: str + :keyword number_of_epochs: Number of training epochs. + :paramtype number_of_epochs: str + :keyword training_batch_size: The batch size for the training procedure. + :paramtype training_batch_size: str + :keyword validation_batch_size: The batch size to be used during evaluation. + :paramtype validation_batch_size: str + :keyword warmup_ratio: The warmup ratio, used alongside LrSchedulerType. + :paramtype warmup_ratio: str + :keyword weight_decay: The weight decay for the training procedure. + :paramtype weight_decay: str + """ + super().__init__(**kwargs) + self.gradient_accumulation_steps = gradient_accumulation_steps + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.number_of_epochs = number_of_epochs + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_ratio = warmup_ratio + self.weight_decay = weight_decay + + +class NlpSweepSettings(_serialization.Model): + """Model sweeping and hyperparameter tuning related settings. + + All required parameters must be populated in order to send to Azure. + + :ivar early_termination: Type of early termination policy for the sweeping job. + :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :ivar sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". 
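+
+# A sketch of a stringified NLP search subspace; the sweep-expression syntax shown
+# ("choice(...)", "uniform(...)") is an assumption about the accepted format.
+from azure.mgmt.machinelearningservices import models
+
+nlp_subspace = models.NlpParameterSubspace(
+    model_name="choice('bert-base-cased','roberta-base')",  # assumed expression syntax
+    learning_rate="uniform(1e-5,5e-5)",  # assumed expression syntax
+)
+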
+ :vartype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + "sampling_algorithm": {"required": True}, + } + + _attribute_map = { + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, + } + + def __init__( + self, + *, + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + **kwargs + ): + """ + :keyword early_termination: Type of early termination policy for the sweeping job. + :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :keyword sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". + :paramtype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + super().__init__(**kwargs) + self.early_termination = early_termination + self.sampling_algorithm = sampling_algorithm + + +class NlpVertical(_serialization.Model): + """Abstract class for NLP related AutoML tasks. + NLP - Natural Language Processing. + + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + + _attribute_map = { + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + } + + def __init__( + self, + *, + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + **kwargs + ): + """ + :keyword featurization_settings: Featurization inputs needed for AutoML job. 
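+
+# A sketch of NLP sweep settings; only the required sampling algorithm is set here, and
+# an EarlyTerminationPolicy instance could additionally be passed via early_termination.
+from azure.mgmt.machinelearningservices import models
+
+nlp_sweep = models.NlpSweepSettings(
+    sampling_algorithm="Random",  # one of the documented SamplingAlgorithmType values
+)
+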
+ :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + super().__init__(**kwargs) + self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters + self.limit_settings = limit_settings + self.search_space = search_space + self.sweep_settings = sweep_settings + self.validation_data = validation_data + + +class NlpVerticalFeaturizationSettings(FeaturizationSettings): + """NlpVerticalFeaturizationSettings. + + :ivar dataset_language: Dataset language, useful for the text data. + :vartype dataset_language: str + """ + + _attribute_map = { + "dataset_language": {"key": "datasetLanguage", "type": "str"}, + } + + def __init__(self, *, dataset_language: Optional[str] = None, **kwargs): + """ + :keyword dataset_language: Dataset language, useful for the text data. + :paramtype dataset_language: str + """ + super().__init__(dataset_language=dataset_language, **kwargs) + + +class NlpVerticalLimitSettings(_serialization.Model): + """Job execution constraints. + + :ivar max_concurrent_trials: Maximum Concurrent AutoML iterations. + :vartype max_concurrent_trials: int + :ivar max_nodes: Maximum nodes to leverage for training in any single trial. Controls + multi-node distributed training. + :vartype max_nodes: int + :ivar max_trials: Number of AutoML iterations. + :vartype max_trials: int + :ivar timeout: AutoML job timeout. + :vartype timeout: ~datetime.timedelta + :ivar trial_timeout: Timeout for individual HD trials. + :vartype trial_timeout: ~datetime.timedelta + """ + + _attribute_map = { + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_nodes": {"key": "maxNodes", "type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + "trial_timeout": {"key": "trialTimeout", "type": "duration"}, + } + + def __init__( + self, + *, + max_concurrent_trials: int = 1, + max_nodes: int = 1, + max_trials: int = 1, + timeout: datetime.timedelta = "P7D", + trial_timeout: Optional[datetime.timedelta] = None, + **kwargs + ): + """ + :keyword max_concurrent_trials: Maximum Concurrent AutoML iterations. + :paramtype max_concurrent_trials: int + :keyword max_nodes: Maximum nodes to leverage for training in any single trial. Controls + multi-node distributed training. + :paramtype max_nodes: int + :keyword max_trials: Number of AutoML iterations. + :paramtype max_trials: int + :keyword timeout: AutoML job timeout. + :paramtype timeout: ~datetime.timedelta + :keyword trial_timeout: Timeout for individual HD trials. 
+ :paramtype trial_timeout: ~datetime.timedelta + """ + super().__init__(**kwargs) + self.max_concurrent_trials = max_concurrent_trials + self.max_nodes = max_nodes + self.max_trials = max_trials + self.timeout = timeout + self.trial_timeout = trial_timeout + + +class NodeStateCounts(_serialization.Model): + """Counts of various compute node states on the amlCompute. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar idle_node_count: Number of compute nodes in idle state. + :vartype idle_node_count: int + :ivar running_node_count: Number of compute nodes which are running jobs. + :vartype running_node_count: int + :ivar preparing_node_count: Number of compute nodes which are being prepared. + :vartype preparing_node_count: int + :ivar unusable_node_count: Number of compute nodes which are in unusable state. + :vartype unusable_node_count: int + :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. + :vartype leaving_node_count: int + :ivar preempted_node_count: Number of compute nodes which are in preempted state. + :vartype preempted_node_count: int + """ + + _validation = { + "idle_node_count": {"readonly": True}, + "running_node_count": {"readonly": True}, + "preparing_node_count": {"readonly": True}, + "unusable_node_count": {"readonly": True}, + "leaving_node_count": {"readonly": True}, + "preempted_node_count": {"readonly": True}, + } + + _attribute_map = { + "idle_node_count": {"key": "idleNodeCount", "type": "int"}, + "running_node_count": {"key": "runningNodeCount", "type": "int"}, + "preparing_node_count": {"key": "preparingNodeCount", "type": "int"}, + "unusable_node_count": {"key": "unusableNodeCount", "type": "int"}, + "leaving_node_count": {"key": "leavingNodeCount", "type": "int"}, + "preempted_node_count": {"key": "preemptedNodeCount", "type": "int"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.idle_node_count = None + self.running_node_count = None + self.preparing_node_count = None + self.unusable_node_count = None + self.leaving_node_count = None + self.preempted_node_count = None + + +class NoneAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """NoneAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", and + "AccessKey". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. + :vartype category: str + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. 
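+
+# A sketch of NLP job limits; the timeout fields are ISO 8601 durations surfaced as
+# datetime.timedelta values, and the numbers below are placeholders.
+import datetime
+
+from azure.mgmt.machinelearningservices import models
+
+nlp_limits = models.NlpVerticalLimitSettings(
+    max_trials=8,
+    max_concurrent_trials=2,
+    max_nodes=4,
+    timeout=datetime.timedelta(hours=6),  # overall AutoML job timeout
+)
+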
"JSON" + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "value": {"key": "value", "type": "str"}, + "value_format": {"key": "valueFormat", "type": "str"}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. + :paramtype category: str + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. "JSON" + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = "None" # type: str + + +class NoneDatastoreCredentials(DatastoreCredentials): + """Empty/none datastore credentials. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + """ + + _validation = { + "credentials_type": {"required": True}, + } + + _attribute_map = { + "credentials_type": {"key": "credentialsType", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.credentials_type = "None" # type: str + + +class NotebookAccessTokenResult(_serialization.Model): + """NotebookAccessTokenResult. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar notebook_resource_id: + :vartype notebook_resource_id: str + :ivar host_name: + :vartype host_name: str + :ivar public_dns: + :vartype public_dns: str + :ivar access_token: + :vartype access_token: str + :ivar token_type: + :vartype token_type: str + :ivar expires_in: + :vartype expires_in: int + :ivar refresh_token: + :vartype refresh_token: str + :ivar scope: + :vartype scope: str + """ + + _validation = { + "notebook_resource_id": {"readonly": True}, + "host_name": {"readonly": True}, + "public_dns": {"readonly": True}, + "access_token": {"readonly": True}, + "token_type": {"readonly": True}, + "expires_in": {"readonly": True}, + "refresh_token": {"readonly": True}, + "scope": {"readonly": True}, + } + + _attribute_map = { + "notebook_resource_id": {"key": "notebookResourceId", "type": "str"}, + "host_name": {"key": "hostName", "type": "str"}, + "public_dns": {"key": "publicDns", "type": "str"}, + "access_token": {"key": "accessToken", "type": "str"}, + "token_type": {"key": "tokenType", "type": "str"}, + "expires_in": {"key": "expiresIn", "type": "int"}, + "refresh_token": {"key": "refreshToken", "type": "str"}, + "scope": {"key": "scope", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.notebook_resource_id = None + self.host_name = None + self.public_dns = None + self.access_token = None + self.token_type = None + self.expires_in = None + self.refresh_token = None + self.scope = None + + +class NotebookPreparationError(_serialization.Model): + """NotebookPreparationError. + + :ivar error_message: + :vartype error_message: str + :ivar status_code: + :vartype status_code: int + """ + + _attribute_map = { + "error_message": {"key": "errorMessage", "type": "str"}, + "status_code": {"key": "statusCode", "type": "int"}, + } + + def __init__(self, *, error_message: Optional[str] = None, status_code: Optional[int] = None, **kwargs): + """ + :keyword error_message: + :paramtype error_message: str + :keyword status_code: + :paramtype status_code: int + """ + super().__init__(**kwargs) + self.error_message = error_message + self.status_code = status_code + + +class NotebookResourceInfo(_serialization.Model): + """NotebookResourceInfo. + + :ivar fqdn: + :vartype fqdn: str + :ivar resource_id: the data plane resourceId that used to initialize notebook component. + :vartype resource_id: str + :ivar notebook_preparation_error: The error that occurs when preparing notebook. + :vartype notebook_preparation_error: + ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + """ + + _attribute_map = { + "fqdn": {"key": "fqdn", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "notebook_preparation_error": {"key": "notebookPreparationError", "type": "NotebookPreparationError"}, + } + + def __init__( + self, + *, + fqdn: Optional[str] = None, + resource_id: Optional[str] = None, + notebook_preparation_error: Optional["_models.NotebookPreparationError"] = None, + **kwargs + ): + """ + :keyword fqdn: + :paramtype fqdn: str + :keyword resource_id: the data plane resourceId that used to initialize notebook component. + :paramtype resource_id: str + :keyword notebook_preparation_error: The error that occurs when preparing notebook. 
+ :paramtype notebook_preparation_error: + ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + """ + super().__init__(**kwargs) + self.fqdn = fqdn + self.resource_id = resource_id + self.notebook_preparation_error = notebook_preparation_error + + +class Objective(_serialization.Model): + """Optimization objective. + + All required parameters must be populated in order to send to Azure. + + :ivar goal: [Required] Defines supported metric goals for hyperparameter tuning. Required. + Known values are: "Minimize" and "Maximize". + :vartype goal: str or ~azure.mgmt.machinelearningservices.models.Goal + :ivar primary_metric: [Required] Name of the metric to optimize. Required. + :vartype primary_metric: str + """ + + _validation = { + "goal": {"required": True}, + "primary_metric": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "goal": {"key": "goal", "type": "str"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } + + def __init__(self, *, goal: Union[str, "_models.Goal"], primary_metric: str, **kwargs): + """ + :keyword goal: [Required] Defines supported metric goals for hyperparameter tuning. Required. + Known values are: "Minimize" and "Maximize". + :paramtype goal: str or ~azure.mgmt.machinelearningservices.models.Goal + :keyword primary_metric: [Required] Name of the metric to optimize. Required. + :paramtype primary_metric: str + """ + super().__init__(**kwargs) + self.goal = goal + self.primary_metric = primary_metric + + +class OnlineDeployment(TrackedResource): + """OnlineDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. 
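+
+# A sketch of a sweep optimization objective; the metric name is a placeholder and must
+# match a metric actually logged by the trial jobs.
+from azure.mgmt.machinelearningservices import models
+
+sweep_objective = models.Objective(
+    goal="Maximize",
+    primary_metric="accuracy",  # hypothetical metric name
+)
+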
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "OnlineDeploymentProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.OnlineDeploymentProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class OnlineDeploymentTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of OnlineDeployment entities. + + :ivar next_link: The link to the next page of OnlineDeployment objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type OnlineDeployment. + :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OnlineDeployment]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.OnlineDeployment"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of OnlineDeployment objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type OnlineDeployment. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class OnlineEndpoint(TrackedResource): + """OnlineEndpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "OnlineEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.OnlineEndpointProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class OnlineEndpointProperties(EndpointPropertiesBase): # pylint: disable=too-many-instance-attributes + """Online endpoint configuration. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". + :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :ivar description: Description of the inference endpoint. + :vartype description: str + :ivar keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + :ivar compute: ARM resource ID of the compute if it exists. + optional. + :vartype compute: str + :ivar mirror_traffic: Percentage of traffic to be mirrored to each deployment without using + returned scoring. Traffic values need to sum to utmost 50. + :vartype mirror_traffic: dict[str, int] + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState + :ivar public_network_access: Set to "Enabled" for endpoints that should allow public access + when Private Link is enabled. Known values are: "Enabled" and "Disabled". + :vartype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :ivar traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic values + need to sum to 100. + :vartype traffic: dict[str, int] + """ + + _validation = { + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": "{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, + "compute": {"key": "compute", "type": "str"}, + "mirror_traffic": {"key": "mirrorTraffic", "type": "{int}"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, + "traffic": {"key": "traffic", "type": "{int}"}, + } + + def __init__( + self, + *, + auth_mode: Union[str, "_models.EndpointAuthMode"], + description: Optional[str] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, + properties: Optional[Dict[str, str]] = None, + compute: Optional[str] = None, + mirror_traffic: Optional[Dict[str, int]] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccessType"]] = None, + traffic: Optional[Dict[str, int]] = None, + **kwargs + ): + """ + :keyword auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 
'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". + :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :keyword description: Description of the inference endpoint. + :paramtype description: str + :keyword keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword compute: ARM resource ID of the compute if it exists. + optional. + :paramtype compute: str + :keyword mirror_traffic: Percentage of traffic to be mirrored to each deployment without using + returned scoring. Traffic values need to sum to utmost 50. + :paramtype mirror_traffic: dict[str, int] + :keyword public_network_access: Set to "Enabled" for endpoints that should allow public access + when Private Link is enabled. Known values are: "Enabled" and "Disabled". + :paramtype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :keyword traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic + values need to sum to 100. + :paramtype traffic: dict[str, int] + """ + super().__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) + self.compute = compute + self.mirror_traffic = mirror_traffic + self.provisioning_state = None + self.public_network_access = public_network_access + self.traffic = traffic + + +class OnlineEndpointTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of OnlineEndpoint entities. + + :ivar next_link: The link to the next page of OnlineEndpoint objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type OnlineEndpoint. + :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OnlineEndpoint]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.OnlineEndpoint"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of OnlineEndpoint objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type OnlineEndpoint. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class OnlineRequestSettings(_serialization.Model): + """Online deployment scoring requests configuration. + + :ivar max_concurrent_requests_per_instance: The number of maximum concurrent requests per node + allowed per deployment. Defaults to 1. + :vartype max_concurrent_requests_per_instance: int + :ivar max_queue_wait: The maximum amount of time a request will stay in the queue in ISO 8601 + format. + Defaults to 500ms. + :vartype max_queue_wait: ~datetime.timedelta + :ivar request_timeout: The scoring timeout in ISO 8601 format. + Defaults to 5000ms. 
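+
+# A sketch of an online endpoint payload routing all traffic to a "blue" deployment;
+# the region, traffic split, and system-assigned identity shown are assumptions.
+from azure.mgmt.machinelearningservices import models
+
+online_endpoint = models.OnlineEndpoint(
+    location="westus2",  # hypothetical region
+    identity=models.ManagedServiceIdentity(type="SystemAssigned"),
+    properties=models.OnlineEndpointProperties(
+        auth_mode="Key",  # key-based authentication
+        traffic={"blue": 100},  # hypothetical deployment name
+    ),
+)
+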
+ :vartype request_timeout: ~datetime.timedelta + """ + + _attribute_map = { + "max_concurrent_requests_per_instance": {"key": "maxConcurrentRequestsPerInstance", "type": "int"}, + "max_queue_wait": {"key": "maxQueueWait", "type": "duration"}, + "request_timeout": {"key": "requestTimeout", "type": "duration"}, + } + + def __init__( + self, + *, + max_concurrent_requests_per_instance: int = 1, + max_queue_wait: datetime.timedelta = "PT0.5S", + request_timeout: datetime.timedelta = "PT5S", + **kwargs + ): + """ + :keyword max_concurrent_requests_per_instance: The number of maximum concurrent requests per + node allowed per deployment. Defaults to 1. + :paramtype max_concurrent_requests_per_instance: int + :keyword max_queue_wait: The maximum amount of time a request will stay in the queue in ISO + 8601 format. + Defaults to 500ms. + :paramtype max_queue_wait: ~datetime.timedelta + :keyword request_timeout: The scoring timeout in ISO 8601 format. + Defaults to 5000ms. + :paramtype request_timeout: ~datetime.timedelta + """ + super().__init__(**kwargs) + self.max_concurrent_requests_per_instance = max_concurrent_requests_per_instance + self.max_queue_wait = max_queue_wait + self.request_timeout = request_timeout + + +class OutputPathAssetReference(AssetReferenceBase): + """Reference to an asset via its path in a job output. + + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". + :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType + :ivar job_id: ARM resource ID of the job. + :vartype job_id: str + :ivar path: The path of the file/directory in the job output. + :vartype path: str + """ + + _validation = { + "reference_type": {"required": True}, + } + + _attribute_map = { + "reference_type": {"key": "referenceType", "type": "str"}, + "job_id": {"key": "jobId", "type": "str"}, + "path": {"key": "path", "type": "str"}, + } + + def __init__(self, *, job_id: Optional[str] = None, path: Optional[str] = None, **kwargs): + """ + :keyword job_id: ARM resource ID of the job. + :paramtype job_id: str + :keyword path: The path of the file/directory in the job output. + :paramtype path: str + """ + super().__init__(**kwargs) + self.reference_type = "OutputPath" # type: str + self.job_id = job_id + self.path = path + + +class PaginatedComputeResourcesList(_serialization.Model): + """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. + + :ivar value: An array of Machine Learning compute objects wrapped in ARM resource envelope. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] + :ivar next_link: A continuation link (absolute URI) to the next page of results in the list. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[ComputeResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.ComputeResource"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: An array of Machine Learning compute objects wrapped in ARM resource envelope. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] + :keyword next_link: A continuation link (absolute URI) to the next page of results in the list. 
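+
+# A sketch of per-deployment scoring request settings; both timeouts are ISO 8601
+# durations expressed as datetime.timedelta values.
+import datetime
+
+from azure.mgmt.machinelearningservices import models
+
+request_settings = models.OnlineRequestSettings(
+    max_concurrent_requests_per_instance=2,
+    request_timeout=datetime.timedelta(seconds=90),
+    max_queue_wait=datetime.timedelta(seconds=1),
+)
+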
+ :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class PartialBatchDeployment(_serialization.Model): + """Mutable batch inference settings per deployment. + + :ivar description: Description of the endpoint deployment. + :vartype description: str + """ + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + } + + def __init__(self, *, description: Optional[str] = None, **kwargs): + """ + :keyword description: Description of the endpoint deployment. + :paramtype description: str + """ + super().__init__(**kwargs) + self.description = description + + +class PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties(_serialization.Model): + """Strictly used in update requests. + + :ivar properties: Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "PartialBatchDeployment"}, + "tags": {"key": "tags", "type": "{str}"}, + } + + def __init__( + self, + *, + properties: Optional["_models.PartialBatchDeployment"] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword properties: Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + """ + super().__init__(**kwargs) + self.properties = properties + self.tags = tags + + +class PartialManagedServiceIdentity(_serialization.Model): + """Managed service identity (system assigned and/or user assigned identities). + + :ivar type: Managed service identity (system assigned and/or user assigned identities). Known + values are: "None", "SystemAssigned", "UserAssigned", and "SystemAssigned,UserAssigned". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :ivar user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. + :vartype user_assigned_identities: dict[str, JSON] + """ + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{object}"}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "_models.ManagedServiceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, JSON]] = None, + **kwargs + ): + """ + :keyword type: Managed service identity (system assigned and/or user assigned identities). + Known values are: "None", "SystemAssigned", "UserAssigned", and "SystemAssigned,UserAssigned". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :keyword user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. 
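+
+# A sketch of the PATCH body used to update a batch deployment's mutable fields; the
+# description and tag values are placeholders.
+from azure.mgmt.machinelearningservices import models
+
+batch_deployment_update = models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties(
+    properties=models.PartialBatchDeployment(description="Nightly scoring deployment"),
+    tags={"stage": "canary"},
+)
+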
+ :paramtype user_assigned_identities: dict[str, JSON] + """ + super().__init__(**kwargs) + self.type = type + self.user_assigned_identities = user_assigned_identities + + +class PartialMinimalTrackedResource(_serialization.Model): + """Strictly used in update requests. + + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + """ + + _attribute_map = { + "tags": {"key": "tags", "type": "{str}"}, + } + + def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + """ + super().__init__(**kwargs) + self.tags = tags + + +class PartialMinimalTrackedResourceWithIdentity(PartialMinimalTrackedResource): + """Strictly used in update requests. + + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity + """ + + _attribute_map = { + "tags": {"key": "tags", "type": "{str}"}, + "identity": {"key": "identity", "type": "PartialManagedServiceIdentity"}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.PartialManagedServiceIdentity"] = None, + **kwargs + ): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity + """ + super().__init__(tags=tags, **kwargs) + self.identity = identity + + +class PartialMinimalTrackedResourceWithSku(PartialMinimalTrackedResource): + """Strictly used in update requests. + + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.PartialSku + """ + + _attribute_map = { + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "PartialSku"}, + } + + def __init__(self, *, tags: Optional[Dict[str, str]] = None, sku: Optional["_models.PartialSku"] = None, **kwargs): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku + """ + super().__init__(tags=tags, **kwargs) + self.sku = sku + + +class PartialRegistryPartialTrackedResource(_serialization.Model): + """Strictly used in update requests. + + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: Additional attributes of the entity. + :vartype properties: JSON + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.PartialSku + :ivar tags: Resource tags. 
+ :vartype tags: dict[str, str] + """ + + _attribute_map = { + "identity": {"key": "identity", "type": "PartialManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "object"}, + "sku": {"key": "sku", "type": "PartialSku"}, + "tags": {"key": "tags", "type": "{str}"}, + } + + def __init__( + self, + *, + identity: Optional["_models.PartialManagedServiceIdentity"] = None, + kind: Optional[str] = None, + properties: Optional[JSON] = None, + sku: Optional["_models.PartialSku"] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: Additional attributes of the entity. + :paramtype properties: JSON + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + """ + super().__init__(**kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + self.tags = tags + + +class PartialSku(_serialization.Model): + """Common SKU definition. + + :ivar capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. + :vartype capacity: int + :ivar family: If the service has different generations of hardware, for the same SKU, then that + can be captured here. + :vartype family: str + :ivar name: The name of the SKU. Ex - P3. It is typically a letter+number code. + :vartype name: str + :ivar size: The SKU size. When the name field is the combination of tier and some other value, + this would be the standalone code. + :vartype size: str + :ivar tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". + :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + """ + + _attribute_map = { + "capacity": {"key": "capacity", "type": "int"}, + "family": {"key": "family", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "size": {"key": "size", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, + } + + def __init__( + self, + *, + capacity: Optional[int] = None, + family: Optional[str] = None, + name: Optional[str] = None, + size: Optional[str] = None, + tier: Optional[Union[str, "_models.SkuTier"]] = None, + **kwargs + ): + """ + :keyword capacity: If the SKU supports scale out/in then the capacity integer should be + included. If scale out/in is not possible for the resource this may be omitted. + :paramtype capacity: int + :keyword family: If the service has different generations of hardware, for the same SKU, then + that can be captured here. + :paramtype family: str + :keyword name: The name of the SKU. Ex - P3. It is typically a letter+number code. + :paramtype name: str + :keyword size: The SKU size. When the name field is the combination of tier and some other + value, this would be the standalone code. 
+ :paramtype size: str + :keyword tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". + :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + """ + super().__init__(**kwargs) + self.capacity = capacity + self.family = family + self.name = name + self.size = size + self.tier = tier + + +class Password(_serialization.Model): + """Password. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: + :vartype name: str + :ivar value: + :vartype value: str + """ + + _validation = { + "name": {"readonly": True}, + "value": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "value": {"key": "value", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.name = None + self.value = None + + +class PATAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """PATAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", and + "AccessKey". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. + :vartype category: str + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. "JSON" + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "value": {"key": "value", "type": "str"}, + "value_format": {"key": "valueFormat", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionPersonalAccessToken"}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionPersonalAccessToken"] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. + :paramtype category: str + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. "JSON" + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken + """ + super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = "PAT" # type: str + self.credentials = credentials + + +class PersonalComputeInstanceSettings(_serialization.Model): + """Settings for a personal compute instance. 
+ + :ivar assigned_user: A user explicitly assigned to a personal compute instance. + :vartype assigned_user: ~azure.mgmt.machinelearningservices.models.AssignedUser + """ + + _attribute_map = { + "assigned_user": {"key": "assignedUser", "type": "AssignedUser"}, + } + + def __init__(self, *, assigned_user: Optional["_models.AssignedUser"] = None, **kwargs): + """ + :keyword assigned_user: A user explicitly assigned to a personal compute instance. + :paramtype assigned_user: ~azure.mgmt.machinelearningservices.models.AssignedUser + """ + super().__init__(**kwargs) + self.assigned_user = assigned_user + + +class PipelineJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes + """Pipeline Job definition: defines generic to MFE attributes. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar inputs: Inputs for the pipeline job. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar jobs: Jobs construct the Pipeline Job. + :vartype jobs: dict[str, JSON] + :ivar outputs: Outputs for the pipeline job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. + :vartype settings: JSON + :ivar source_job_id: ARM resource ID of source job. 
+ :vartype source_job_id: str + """ + + _validation = { + "job_type": {"required": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "jobs": {"key": "jobs", "type": "{object}"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "settings": {"key": "settings", "type": "object"}, + "source_job_id": {"key": "sourceJobId", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + jobs: Optional[Dict[str, JSON]] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + settings: Optional[JSON] = None, + source_job_id: Optional[str] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword inputs: Inputs for the pipeline job. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword jobs: Jobs construct the Pipeline Job. + :paramtype jobs: dict[str, JSON] + :keyword outputs: Outputs for the pipeline job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. 
+ :paramtype settings: JSON + :keyword source_job_id: ARM resource ID of source job. + :paramtype source_job_id: str + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + services=services, + **kwargs + ) + self.job_type = "Pipeline" # type: str + self.inputs = inputs + self.jobs = jobs + self.outputs = outputs + self.settings = settings + self.source_job_id = source_job_id + + +class PrivateEndpoint(_serialization.Model): + """The Private Endpoint resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The ARM identifier for Private Endpoint. + :vartype id: str + :ivar subnet_arm_id: The ARM identifier for Subnet resource that private endpoint links to. + :vartype subnet_arm_id: str + """ + + _validation = { + "id": {"readonly": True}, + "subnet_arm_id": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "subnet_arm_id": {"key": "subnetArmId", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.id = None + self.subnet_arm_id = None + + +class PrivateEndpointConnection(Resource): # pylint: disable=too-many-instance-attributes + """The Private Endpoint Connection resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar location: Specifies the location of the resource. + :vartype location: str + :ivar tags: Contains resource tags defined as key/value pairs. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar private_endpoint: The resource of private end point. + :vartype private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint + :ivar private_link_service_connection_state: A collection of information about the state of the + connection between service consumer and provider. + :vartype private_link_service_connection_state: + ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: The provisioning state of the private endpoint connection resource. + Known values are: "Succeeded", "Creating", "Deleting", and "Failed". 
+ :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "Sku"}, + "private_endpoint": {"key": "properties.privateEndpoint", "type": "PrivateEndpoint"}, + "private_link_service_connection_state": { + "key": "properties.privateLinkServiceConnectionState", + "type": "PrivateLinkServiceConnectionState", + }, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + identity: Optional["_models.ManagedServiceIdentity"] = None, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["_models.Sku"] = None, + private_endpoint: Optional["_models.PrivateEndpoint"] = None, + private_link_service_connection_state: Optional["_models.PrivateLinkServiceConnectionState"] = None, + **kwargs + ): + """ + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword location: Specifies the location of the resource. + :paramtype location: str + :keyword tags: Contains resource tags defined as key/value pairs. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword private_endpoint: The resource of private end point. + :paramtype private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint + :keyword private_link_service_connection_state: A collection of information about the state of + the connection between service consumer and provider. + :paramtype private_link_service_connection_state: + ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState + """ + super().__init__(**kwargs) + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + self.provisioning_state = None + + +class PrivateEndpointConnectionListResult(_serialization.Model): + """List of private endpoint connection associated with the specified workspace. + + :ivar value: Array of private endpoint connections. + :vartype value: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + """ + + _attribute_map = { + "value": {"key": "value", "type": "[PrivateEndpointConnection]"}, + } + + def __init__(self, *, value: Optional[List["_models.PrivateEndpointConnection"]] = None, **kwargs): + """ + :keyword value: Array of private endpoint connections. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + """ + super().__init__(**kwargs) + self.value = value + + +class PrivateLinkResource(Resource): # pylint: disable=too-many-instance-attributes + """A private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar location: Specifies the location of the resource. + :vartype location: str + :ivar tags: Contains resource tags defined as key/value pairs. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :ivar required_zone_names: The private link resource Private link DNS zone name. + :vartype required_zone_names: list[str] + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "group_id": {"readonly": True}, + "required_members": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "Sku"}, + "group_id": {"key": "properties.groupId", "type": "str"}, + "required_members": {"key": "properties.requiredMembers", "type": "[str]"}, + "required_zone_names": {"key": "properties.requiredZoneNames", "type": "[str]"}, + } + + def __init__( + self, + *, + identity: Optional["_models.ManagedServiceIdentity"] = None, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["_models.Sku"] = None, + required_zone_names: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword location: Specifies the location of the resource. + :paramtype location: str + :keyword tags: Contains resource tags defined as key/value pairs. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword required_zone_names: The private link resource Private link DNS zone name. + :paramtype required_zone_names: list[str] + """ + super().__init__(**kwargs) + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.group_id = None + self.required_members = None + self.required_zone_names = required_zone_names + + +class PrivateLinkResourceListResult(_serialization.Model): + """A list of private link resources. + + :ivar value: Array of private link resources. 
+ :vartype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] + """ + + _attribute_map = { + "value": {"key": "value", "type": "[PrivateLinkResource]"}, + } + + def __init__(self, *, value: Optional[List["_models.PrivateLinkResource"]] = None, **kwargs): + """ + :keyword value: Array of private link resources. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] + """ + super().__init__(**kwargs) + self.value = value + + +class PrivateLinkServiceConnectionState(_serialization.Model): + """A collection of information about the state of the connection between service consumer and provider. + + :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", and + "Timeout". + :vartype status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + :ivar description: The reason for approval/rejection of the connection. + :vartype description: str + :ivar actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :vartype actions_required: str + """ + + _attribute_map = { + "status": {"key": "status", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "actions_required": {"key": "actionsRequired", "type": "str"}, + } + + def __init__( + self, + *, + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, + description: Optional[str] = None, + actions_required: Optional[str] = None, + **kwargs + ): + """ + :keyword status: Indicates whether the connection has been Approved/Rejected/Removed by the + owner of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", and + "Timeout". + :paramtype status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + :keyword description: The reason for approval/rejection of the connection. + :paramtype description: str + :keyword actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :paramtype actions_required: str + """ + super().__init__(**kwargs) + self.status = status + self.description = description + self.actions_required = actions_required + + +class ProbeSettings(_serialization.Model): + """Deployment container liveness/readiness probe configuration. + + :ivar failure_threshold: The number of failures to allow before returning an unhealthy status. + :vartype failure_threshold: int + :ivar initial_delay: The delay before the first probe in ISO 8601 format. + :vartype initial_delay: ~datetime.timedelta + :ivar period: The length of time between probes in ISO 8601 format. + :vartype period: ~datetime.timedelta + :ivar success_threshold: The number of successful probes before returning a healthy status. + :vartype success_threshold: int + :ivar timeout: The probe timeout in ISO 8601 format. 
+ :vartype timeout: ~datetime.timedelta + """ + + _attribute_map = { + "failure_threshold": {"key": "failureThreshold", "type": "int"}, + "initial_delay": {"key": "initialDelay", "type": "duration"}, + "period": {"key": "period", "type": "duration"}, + "success_threshold": {"key": "successThreshold", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + } + + def __init__( + self, + *, + failure_threshold: int = 30, + initial_delay: Optional[datetime.timedelta] = None, + period: datetime.timedelta = "PT10S", + success_threshold: int = 1, + timeout: datetime.timedelta = "PT2S", + **kwargs + ): + """ + :keyword failure_threshold: The number of failures to allow before returning an unhealthy + status. + :paramtype failure_threshold: int + :keyword initial_delay: The delay before the first probe in ISO 8601 format. + :paramtype initial_delay: ~datetime.timedelta + :keyword period: The length of time between probes in ISO 8601 format. + :paramtype period: ~datetime.timedelta + :keyword success_threshold: The number of successful probes before returning a healthy status. + :paramtype success_threshold: int + :keyword timeout: The probe timeout in ISO 8601 format. + :paramtype timeout: ~datetime.timedelta + """ + super().__init__(**kwargs) + self.failure_threshold = failure_threshold + self.initial_delay = initial_delay + self.period = period + self.success_threshold = success_threshold + self.timeout = timeout + + +class ProgressMetrics(_serialization.Model): + """Progress metrics definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar completed_datapoint_count: The completed datapoint count. + :vartype completed_datapoint_count: int + :ivar incremental_data_last_refresh_date_time: The time of last successful incremental data + refresh in UTC. + :vartype incremental_data_last_refresh_date_time: ~datetime.datetime + :ivar skipped_datapoint_count: The skipped datapoint count. + :vartype skipped_datapoint_count: int + :ivar total_datapoint_count: The total datapoint count. + :vartype total_datapoint_count: int + """ + + _validation = { + "completed_datapoint_count": {"readonly": True}, + "incremental_data_last_refresh_date_time": {"readonly": True}, + "skipped_datapoint_count": {"readonly": True}, + "total_datapoint_count": {"readonly": True}, + } + + _attribute_map = { + "completed_datapoint_count": {"key": "completedDatapointCount", "type": "int"}, + "incremental_data_last_refresh_date_time": {"key": "incrementalDataLastRefreshDateTime", "type": "iso-8601"}, + "skipped_datapoint_count": {"key": "skippedDatapointCount", "type": "int"}, + "total_datapoint_count": {"key": "totalDatapointCount", "type": "int"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.completed_datapoint_count = None + self.incremental_data_last_refresh_date_time = None + self.skipped_datapoint_count = None + self.total_datapoint_count = None + + +class PyTorch(DistributionConfiguration): + """PyTorch distribution configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. + Known values are: "PyTorch", "TensorFlow", and "Mpi". + :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType + :ivar process_count_per_instance: Number of processes per node. 
+ :vartype process_count_per_instance: int + """ + + _validation = { + "distribution_type": {"required": True}, + } + + _attribute_map = { + "distribution_type": {"key": "distributionType", "type": "str"}, + "process_count_per_instance": {"key": "processCountPerInstance", "type": "int"}, + } + + def __init__(self, *, process_count_per_instance: Optional[int] = None, **kwargs): + """ + :keyword process_count_per_instance: Number of processes per node. + :paramtype process_count_per_instance: int + """ + super().__init__(**kwargs) + self.distribution_type = "PyTorch" # type: str + self.process_count_per_instance = process_count_per_instance + + +class QuotaBaseProperties(_serialization.Model): + """The properties for Quota update or retrieval. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar limit: The maximum permitted quota of the resource. + :vartype limit: int + :ivar unit: An enum describing the unit of quota measurement. "Count" + :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + """ + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "limit": {"key": "limit", "type": "int"}, + "unit": {"key": "unit", "type": "str"}, + } + + def __init__( + self, + *, + id: Optional[str] = None, # pylint: disable=redefined-builtin + type: Optional[str] = None, + limit: Optional[int] = None, + unit: Optional[Union[str, "_models.QuotaUnit"]] = None, + **kwargs + ): + """ + :keyword id: Specifies the resource ID. + :paramtype id: str + :keyword type: Specifies the resource type. + :paramtype type: str + :keyword limit: The maximum permitted quota of the resource. + :paramtype limit: int + :keyword unit: An enum describing the unit of quota measurement. "Count" + :paramtype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + """ + super().__init__(**kwargs) + self.id = id + self.type = type + self.limit = limit + self.unit = unit + + +class QuotaUpdateParameters(_serialization.Model): + """Quota update parameters. + + :ivar value: The list for update quota. + :vartype value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] + :ivar location: Region of workspace quota to be updated. + :vartype location: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[QuotaBaseProperties]"}, + "location": {"key": "location", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.QuotaBaseProperties"]] = None, location: Optional[str] = None, **kwargs + ): + """ + :keyword value: The list for update quota. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] + :keyword location: Region of workspace quota to be updated. + :paramtype location: str + """ + super().__init__(**kwargs) + self.value = value + self.location = location + + +class RandomSamplingAlgorithm(SamplingAlgorithm): + """Defines a Sampling Algorithm that generates values randomly. + + All required parameters must be populated in order to send to Azure. + + :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter + values, along with configuration properties. Required. Known values are: "Grid", "Random", and + "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + :ivar rule: The specific type of random algorithm. Known values are: "Random" and "Sobol". 
+ :vartype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule + :ivar seed: An optional integer to use as the seed for random number generation. + :vartype seed: int + """ + + _validation = { + "sampling_algorithm_type": {"required": True}, + } + + _attribute_map = { + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, + "rule": {"key": "rule", "type": "str"}, + "seed": {"key": "seed", "type": "int"}, + } + + def __init__( + self, + *, + rule: Optional[Union[str, "_models.RandomSamplingAlgorithmRule"]] = None, + seed: Optional[int] = None, + **kwargs + ): + """ + :keyword rule: The specific type of random algorithm. Known values are: "Random" and "Sobol". + :paramtype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule + :keyword seed: An optional integer to use as the seed for random number generation. + :paramtype seed: int + """ + super().__init__(**kwargs) + self.sampling_algorithm_type = "Random" # type: str + self.rule = rule + self.seed = seed + + +class Recurrence(_serialization.Model): + """The workflow trigger recurrence for ComputeStartStop schedule type. + + :ivar frequency: [Required] The frequency to trigger schedule. Known values are: "Minute", + "Hour", "Day", "Week", and "Month". + :vartype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency + :ivar interval: [Required] Specifies schedule interval in conjunction with frequency. + :vartype interval: int + :ivar start_time: The start time in yyyy-MM-ddTHH:mm:ss format. + :vartype start_time: str + :ivar time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :vartype time_zone: str + :ivar schedule: [Required] The recurrence schedule. + :vartype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule + """ + + _attribute_map = { + "frequency": {"key": "frequency", "type": "str"}, + "interval": {"key": "interval", "type": "int"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "schedule": {"key": "schedule", "type": "RecurrenceSchedule"}, + } + + def __init__( + self, + *, + frequency: Optional[Union[str, "_models.RecurrenceFrequency"]] = None, + interval: Optional[int] = None, + start_time: Optional[str] = None, + time_zone: str = "UTC", + schedule: Optional["_models.RecurrenceSchedule"] = None, + **kwargs + ): + """ + :keyword frequency: [Required] The frequency to trigger schedule. Known values are: "Minute", + "Hour", "Day", "Week", and "Month". + :paramtype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency + :keyword interval: [Required] Specifies schedule interval in conjunction with frequency. + :paramtype interval: int + :keyword start_time: The start time in yyyy-MM-ddTHH:mm:ss format. + :paramtype start_time: str + :keyword time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :paramtype time_zone: str + :keyword schedule: [Required] The recurrence schedule. 
+ :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule
+ """
+ super().__init__(**kwargs)
+ self.frequency = frequency
+ self.interval = interval
+ self.start_time = start_time
+ self.time_zone = time_zone
+ self.schedule = schedule
+
+
+ class RecurrenceSchedule(_serialization.Model):
+ """RecurrenceSchedule.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar hours: [Required] List of hours for the schedule. Required.
+ :vartype hours: list[int]
+ :ivar minutes: [Required] List of minutes for the schedule. Required.
+ :vartype minutes: list[int]
+ :ivar month_days: List of month days for the schedule.
+ :vartype month_days: list[int]
+ :ivar week_days: List of days for the schedule.
+ :vartype week_days: list[str or ~azure.mgmt.machinelearningservices.models.WeekDay]
+ """
+
+ _validation = {
+ "hours": {"required": True},
+ "minutes": {"required": True},
+ }
+
+ _attribute_map = {
+ "hours": {"key": "hours", "type": "[int]"},
+ "minutes": {"key": "minutes", "type": "[int]"},
+ "month_days": {"key": "monthDays", "type": "[int]"},
+ "week_days": {"key": "weekDays", "type": "[str]"},
+ }
+
+ def __init__(
+ self,
+ *,
+ hours: List[int],
+ minutes: List[int],
+ month_days: Optional[List[int]] = None,
+ week_days: Optional[List[Union[str, "_models.WeekDay"]]] = None,
+ **kwargs
+ ):
+ """
+ :keyword hours: [Required] List of hours for the schedule. Required.
+ :paramtype hours: list[int]
+ :keyword minutes: [Required] List of minutes for the schedule. Required.
+ :paramtype minutes: list[int]
+ :keyword month_days: List of month days for the schedule.
+ :paramtype month_days: list[int]
+ :keyword week_days: List of days for the schedule.
+ :paramtype week_days: list[str or ~azure.mgmt.machinelearningservices.models.WeekDay]
+ """
+ super().__init__(**kwargs)
+ self.hours = hours
+ self.minutes = minutes
+ self.month_days = month_days
+ self.week_days = week_days
+
+
+ class RecurrenceTrigger(TriggerBase):
+ """RecurrenceTrigger.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar end_time: Specifies end time of schedule in ISO 8601 format, but without a UTC offset.
+ Refer https://en.wikipedia.org/wiki/ISO_8601.
+ Recommended format would be "2022-06-01T00:00:01".
+ If not present, the schedule will run indefinitely.
+ :vartype end_time: str
+ :ivar start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC
+ offset.
+ :vartype start_time: str
+ :ivar time_zone: Specifies time zone in which the schedule runs.
+ TimeZone should follow Windows time zone format. Refer:
+ https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11.
+ :vartype time_zone: str
+ :ivar trigger_type: [Required]. Required. Known values are: "Recurrence" and "Cron".
+ :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType
+ :ivar frequency: [Required] The frequency to trigger schedule. Required. Known values are:
+ "Minute", "Hour", "Day", "Week", and "Month".
+ :vartype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency
+ :ivar interval: [Required] Specifies schedule interval in conjunction with frequency. Required.
+ :vartype interval: int
+ :ivar schedule: The recurrence schedule.
+ :vartype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule
+ """
+
+ _validation = {
+ "trigger_type": {"required": True},
+ "frequency": {"required": True},
+ "interval": {"required": True},
+ }
+
+ _attribute_map = {
+ "end_time": {"key": "endTime", "type": "str"},
+ "start_time": {"key": "startTime", "type": "str"},
+ "time_zone": {"key": "timeZone", "type": "str"},
+ "trigger_type": {"key": "triggerType", "type": "str"},
+ "frequency": {"key": "frequency", "type": "str"},
+ "interval": {"key": "interval", "type": "int"},
+ "schedule": {"key": "schedule", "type": "RecurrenceSchedule"},
+ }
+
+ def __init__(
+ self,
+ *,
+ frequency: Union[str, "_models.RecurrenceFrequency"],
+ interval: int,
+ end_time: Optional[str] = None,
+ start_time: Optional[str] = None,
+ time_zone: str = "UTC",
+ schedule: Optional["_models.RecurrenceSchedule"] = None,
+ **kwargs
+ ):
+ """
+ :keyword end_time: Specifies end time of schedule in ISO 8601 format, but without a UTC
+ offset. Refer https://en.wikipedia.org/wiki/ISO_8601.
+ Recommended format would be "2022-06-01T00:00:01".
+ If not present, the schedule will run indefinitely.
+ :paramtype end_time: str
+ :keyword start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC
+ offset.
+ :paramtype start_time: str
+ :keyword time_zone: Specifies time zone in which the schedule runs.
+ TimeZone should follow Windows time zone format. Refer:
+ https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11.
+ :paramtype time_zone: str
+ :keyword frequency: [Required] The frequency to trigger schedule. Required. Known values are:
+ "Minute", "Hour", "Day", "Week", and "Month".
+ :paramtype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency
+ :keyword interval: [Required] Specifies schedule interval in conjunction with frequency.
+ Required.
+ :paramtype interval: int
+ :keyword schedule: The recurrence schedule.
+ :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule
+ """
+ super().__init__(end_time=end_time, start_time=start_time, time_zone=time_zone, **kwargs)
+ self.trigger_type = "Recurrence" # type: str
+ self.frequency = frequency
+ self.interval = interval
+ self.schedule = schedule
+
+
+ class RegenerateEndpointKeysRequest(_serialization.Model):
+ """RegenerateEndpointKeysRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar key_type: [Required] Specification for which type of key to generate. Primary or
+ Secondary. Required. Known values are: "Primary" and "Secondary".
+ :vartype key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType
+ :ivar key_value: The value the key is set to.
+ :vartype key_value: str
+ """
+
+ _validation = {
+ "key_type": {"required": True},
+ }
+
+ _attribute_map = {
+ "key_type": {"key": "keyType", "type": "str"},
+ "key_value": {"key": "keyValue", "type": "str"},
+ }
+
+ def __init__(self, *, key_type: Union[str, "_models.KeyType"], key_value: Optional[str] = None, **kwargs):
+ """
+ :keyword key_type: [Required] Specification for which type of key to generate. Primary or
+ Secondary. Required. Known values are: "Primary" and "Secondary".
+ :paramtype key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType
+ :keyword key_value: The value the key is set to.
+ :paramtype key_value: str + """ + super().__init__(**kwargs) + self.key_type = key_type + self.key_value = key_value + + +class Registry(TrackedResource): + """Registry. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.RegistryProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "RegistryProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.RegistryProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.RegistryProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. 
+ :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class RegistryListCredentialsResult(_serialization.Model): + """RegistryListCredentialsResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar location: + :vartype location: str + :ivar username: + :vartype username: str + :ivar passwords: + :vartype passwords: list[~azure.mgmt.machinelearningservices.models.Password] + """ + + _validation = { + "location": {"readonly": True}, + "username": {"readonly": True}, + } + + _attribute_map = { + "location": {"key": "location", "type": "str"}, + "username": {"key": "username", "type": "str"}, + "passwords": {"key": "passwords", "type": "[Password]"}, + } + + def __init__(self, *, passwords: Optional[List["_models.Password"]] = None, **kwargs): + """ + :keyword passwords: + :paramtype passwords: list[~azure.mgmt.machinelearningservices.models.Password] + """ + super().__init__(**kwargs) + self.location = None + self.username = None + self.passwords = passwords + + +class RegistryProperties(ResourceBase): + """Details of the Registry. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar public_network_access: + :vartype public_network_access: str + :ivar discovery_url: + :vartype discovery_url: str + :ivar intellectual_property_publisher: + :vartype intellectual_property_publisher: str + :ivar managed_resource_group: Managed resource group created for the registry. + :vartype managed_resource_group: ~azure.mgmt.machinelearningservices.models.ArmResourceId + :ivar ml_flow_registry_uri: + :vartype ml_flow_registry_uri: str + :ivar private_link_count: + :vartype private_link_count: int + :ivar region_details: Details of each region the registry is in. 
+ :vartype region_details: + list[~azure.mgmt.machinelearningservices.models.RegistryRegionArmDetails] + """ + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, + "discovery_url": {"key": "discoveryUrl", "type": "str"}, + "intellectual_property_publisher": {"key": "intellectualPropertyPublisher", "type": "str"}, + "managed_resource_group": {"key": "managedResourceGroup", "type": "ArmResourceId"}, + "ml_flow_registry_uri": {"key": "mlFlowRegistryUri", "type": "str"}, + "private_link_count": {"key": "privateLinkCount", "type": "int"}, + "region_details": {"key": "regionDetails", "type": "[RegistryRegionArmDetails]"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + public_network_access: Optional[str] = None, + discovery_url: Optional[str] = None, + intellectual_property_publisher: Optional[str] = None, + managed_resource_group: Optional["_models.ArmResourceId"] = None, + ml_flow_registry_uri: Optional[str] = None, + private_link_count: Optional[int] = None, + region_details: Optional[List["_models.RegistryRegionArmDetails"]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword public_network_access: + :paramtype public_network_access: str + :keyword discovery_url: + :paramtype discovery_url: str + :keyword intellectual_property_publisher: + :paramtype intellectual_property_publisher: str + :keyword managed_resource_group: Managed resource group created for the registry. + :paramtype managed_resource_group: ~azure.mgmt.machinelearningservices.models.ArmResourceId + :keyword ml_flow_registry_uri: + :paramtype ml_flow_registry_uri: str + :keyword private_link_count: + :paramtype private_link_count: int + :keyword region_details: Details of each region the registry is in. + :paramtype region_details: + list[~azure.mgmt.machinelearningservices.models.RegistryRegionArmDetails] + """ + super().__init__(description=description, properties=properties, tags=tags, **kwargs) + self.public_network_access = public_network_access + self.discovery_url = discovery_url + self.intellectual_property_publisher = intellectual_property_publisher + self.managed_resource_group = managed_resource_group + self.ml_flow_registry_uri = ml_flow_registry_uri + self.private_link_count = private_link_count + self.region_details = region_details + + +class RegistryRegionArmDetails(_serialization.Model): + """Details for each region the registry is in. + + :ivar acr_details: List of ACR accounts. + :vartype acr_details: list[~azure.mgmt.machinelearningservices.models.AcrDetails] + :ivar location: The location where the registry exists. + :vartype location: str + :ivar storage_account_details: List of storage accounts. 
+ :vartype storage_account_details: + list[~azure.mgmt.machinelearningservices.models.StorageAccountDetails] + """ + + _attribute_map = { + "acr_details": {"key": "acrDetails", "type": "[AcrDetails]"}, + "location": {"key": "location", "type": "str"}, + "storage_account_details": {"key": "storageAccountDetails", "type": "[StorageAccountDetails]"}, + } + + def __init__( + self, + *, + acr_details: Optional[List["_models.AcrDetails"]] = None, + location: Optional[str] = None, + storage_account_details: Optional[List["_models.StorageAccountDetails"]] = None, + **kwargs + ): + """ + :keyword acr_details: List of ACR accounts. + :paramtype acr_details: list[~azure.mgmt.machinelearningservices.models.AcrDetails] + :keyword location: The location where the registry exists. + :paramtype location: str + :keyword storage_account_details: List of storage accounts. + :paramtype storage_account_details: + list[~azure.mgmt.machinelearningservices.models.StorageAccountDetails] + """ + super().__init__(**kwargs) + self.acr_details = acr_details + self.location = location + self.storage_account_details = storage_account_details + + +class RegistryTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of Registry entities. + + :ivar next_link: The link to the next page of Registry objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type Registry. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Registry] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Registry]"}, + } + + def __init__(self, *, next_link: Optional[str] = None, value: Optional[List["_models.Registry"]] = None, **kwargs): + """ + :keyword next_link: The link to the next page of Registry objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type Registry. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.Registry] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class Regression(TableVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Regression task in AutoML Table vertical. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar cv_split_column_names: Columns to use for CVSplit data. + :vartype cv_split_column_names: list[str] + :ivar featurization_settings: Featurization inputs needed for AutoML job. 
+ :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset + when validation dataset is not provided. + :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :ivar test_data: Test data input. + :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype test_data_size: float + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :vartype weight_column_name: str + :ivar primary_metric: Primary metric for regression task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and + "NormalizedMeanAbsoluteError". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.RegressionPrimaryMetrics + :ivar training_settings: Inputs for training phase for an AutoML Job. 
+ :vartype training_settings: + ~azure.mgmt.machinelearningservices.models.RegressionTrainingSettings + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", "type": "RegressionTrainingSettings"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + cv_split_column_names: Optional[List[str]] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, + test_data_size: Optional[float] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + weight_column_name: Optional[str] = None, + primary_metric: Optional[Union[str, "_models.RegressionPrimaryMetrics"]] = None, + training_settings: Optional["_models.RegressionTrainingSettings"] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword cv_split_column_names: Columns to use for CVSplit data. + :paramtype cv_split_column_names: list[str] + :keyword featurization_settings: Featurization inputs needed for AutoML job. 
+ :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :keyword n_cross_validations: Number of cross validation folds to be applied on training + dataset + when validation dataset is not provided. + :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :keyword test_data: Test data input. + :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype test_data_size: float + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :paramtype weight_column_name: str + :keyword primary_metric: Primary metric for regression task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and + "NormalizedMeanAbsoluteError". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.RegressionPrimaryMetrics + :keyword training_settings: Inputs for training phase for an AutoML Job. 
+ :paramtype training_settings: + ~azure.mgmt.machinelearningservices.models.RegressionTrainingSettings + """ + super().__init__( + cv_split_column_names=cv_split_column_names, + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + n_cross_validations=n_cross_validations, + search_space=search_space, + sweep_settings=sweep_settings, + test_data=test_data, + test_data_size=test_data_size, + validation_data=validation_data, + validation_data_size=validation_data_size, + weight_column_name=weight_column_name, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = "Regression" # type: str + self.training_data = training_data + self.primary_metric = primary_metric + self.training_settings = training_settings + self.cv_split_column_names = cv_split_column_names + self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters + self.limit_settings = limit_settings + self.n_cross_validations = n_cross_validations + self.search_space = search_space + self.sweep_settings = sweep_settings + self.test_data = test_data + self.test_data_size = test_data_size + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.weight_column_name = weight_column_name + + +class RegressionTrainingSettings(TrainingSettings): + """Regression Training related configuration. + + :ivar enable_dnn_training: Enable recommendation of DNN models. + :vartype enable_dnn_training: bool + :ivar enable_model_explainability: Flag to turn on explainability on best model. + :vartype enable_model_explainability: bool + :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :vartype enable_onnx_compatible_models: bool + :ivar enable_stack_ensemble: Enable stack ensemble run. + :vartype enable_stack_ensemble: bool + :ivar enable_vote_ensemble: Enable voting ensemble run. + :vartype enable_vote_ensemble: bool + :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :vartype ensemble_model_download_timeout: ~datetime.timedelta + :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :vartype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar allowed_training_algorithms: Allowed models for regression task. + :vartype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.RegressionModels] + :ivar blocked_training_algorithms: Blocked models for regression task. 
+ :vartype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.RegressionModels] + """ + + _attribute_map = { + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, + } + + def __init__( + self, + *, + enable_dnn_training: bool = False, + enable_model_explainability: bool = True, + enable_onnx_compatible_models: bool = False, + enable_stack_ensemble: bool = True, + enable_vote_ensemble: bool = True, + ensemble_model_download_timeout: datetime.timedelta = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.RegressionModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.RegressionModels"]]] = None, + **kwargs + ): + """ + :keyword enable_dnn_training: Enable recommendation of DNN models. + :paramtype enable_dnn_training: bool + :keyword enable_model_explainability: Flag to turn on explainability on best model. + :paramtype enable_model_explainability: bool + :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :paramtype enable_onnx_compatible_models: bool + :keyword enable_stack_ensemble: Enable stack ensemble run. + :paramtype enable_stack_ensemble: bool + :keyword enable_vote_ensemble: Enable voting ensemble run. + :paramtype enable_vote_ensemble: bool + :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :paramtype ensemble_model_download_timeout: ~datetime.timedelta + :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :paramtype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword allowed_training_algorithms: Allowed models for regression task. + :paramtype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.RegressionModels] + :keyword blocked_training_algorithms: Blocked models for regression task. 
+ :paramtype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.RegressionModels] + """ + super().__init__( + enable_dnn_training=enable_dnn_training, + enable_model_explainability=enable_model_explainability, + enable_onnx_compatible_models=enable_onnx_compatible_models, + enable_stack_ensemble=enable_stack_ensemble, + enable_vote_ensemble=enable_vote_ensemble, + ensemble_model_download_timeout=ensemble_model_download_timeout, + stack_ensemble_settings=stack_ensemble_settings, + **kwargs + ) + self.allowed_training_algorithms = allowed_training_algorithms + self.blocked_training_algorithms = blocked_training_algorithms + + +class ResourceId(_serialization.Model): + """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The ID of the resource. Required. + :vartype id: str + """ + + _validation = { + "id": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + } + + def __init__(self, *, id: str, **kwargs): # pylint: disable=redefined-builtin + """ + :keyword id: The ID of the resource. Required. + :paramtype id: str + """ + super().__init__(**kwargs) + self.id = id + + +class ResourceName(_serialization.Model): + """The Resource Name. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The name of the resource. + :vartype value: str + :ivar localized_value: The localized name of the resource. + :vartype localized_value: str + """ + + _validation = { + "value": {"readonly": True}, + "localized_value": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "str"}, + "localized_value": {"key": "localizedValue", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.value = None + self.localized_value = None + + +class ResourceQuota(_serialization.Model): + """The quota assigned to a resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar aml_workspace_location: Region of the AML workspace in the id. + :vartype aml_workspace_location: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar name: Name of the resource. + :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName + :ivar limit: The maximum permitted quota of the resource. + :vartype limit: int + :ivar unit: An enum describing the unit of quota measurement. "Count" + :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + """ + + _validation = { + "id": {"readonly": True}, + "aml_workspace_location": {"readonly": True}, + "type": {"readonly": True}, + "name": {"readonly": True}, + "limit": {"readonly": True}, + "unit": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "aml_workspace_location": {"key": "amlWorkspaceLocation", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "name": {"key": "name", "type": "ResourceName"}, + "limit": {"key": "limit", "type": "int"}, + "unit": {"key": "unit", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.id = None + self.aml_workspace_location = None + self.type = None + self.name = None + self.limit = None + self.unit = None + + +class Route(_serialization.Model): + """Route. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar path: [Required] The path for the route. Required. + :vartype path: str + :ivar port: [Required] The port for the route. Required. + :vartype port: int + """ + + _validation = { + "path": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "port": {"required": True}, + } + + _attribute_map = { + "path": {"key": "path", "type": "str"}, + "port": {"key": "port", "type": "int"}, + } + + def __init__(self, *, path: str, port: int, **kwargs): + """ + :keyword path: [Required] The path for the route. Required. + :paramtype path: str + :keyword port: [Required] The port for the route. Required. + :paramtype port: int + """ + super().__init__(**kwargs) + self.path = path + self.port = port + + +class SASAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """SASAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", and + "AccessKey". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. + :vartype category: str + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. "JSON" + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "value": {"key": "value", "type": "str"}, + "value_format": {"key": "valueFormat", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionSharedAccessSignature"}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionSharedAccessSignature"] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. + :paramtype category: str + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. "JSON" + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature + """ + super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = "SAS" # type: str + self.credentials = credentials + + +class SasDatastoreCredentials(DatastoreCredentials): + """SAS datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. 
Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar secrets: [Required] Storage container secrets. Required. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets + """ + + _validation = { + "credentials_type": {"required": True}, + "secrets": {"required": True}, + } + + _attribute_map = { + "credentials_type": {"key": "credentialsType", "type": "str"}, + "secrets": {"key": "secrets", "type": "SasDatastoreSecrets"}, + } + + def __init__(self, *, secrets: "_models.SasDatastoreSecrets", **kwargs): + """ + :keyword secrets: [Required] Storage container secrets. Required. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets + """ + super().__init__(**kwargs) + self.credentials_type = "Sas" # type: str + self.secrets = secrets + + +class SasDatastoreSecrets(DatastoreSecrets): + """Datastore SAS secrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar sas_token: Storage container SAS token. + :vartype sas_token: str + """ + + _validation = { + "secrets_type": {"required": True}, + } + + _attribute_map = { + "secrets_type": {"key": "secretsType", "type": "str"}, + "sas_token": {"key": "sasToken", "type": "str"}, + } + + def __init__(self, *, sas_token: Optional[str] = None, **kwargs): + """ + :keyword sas_token: Storage container SAS token. + :paramtype sas_token: str + """ + super().__init__(**kwargs) + self.secrets_type = "Sas" # type: str + self.sas_token = sas_token + + +class ScaleSettings(_serialization.Model): + """scale settings for AML Compute. + + All required parameters must be populated in order to send to Azure. + + :ivar max_node_count: Max number of nodes to use. Required. + :vartype max_node_count: int + :ivar min_node_count: Min number of nodes to use. + :vartype min_node_count: int + :ivar node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This + string needs to be in the RFC Format. + :vartype node_idle_time_before_scale_down: ~datetime.timedelta + """ + + _validation = { + "max_node_count": {"required": True}, + } + + _attribute_map = { + "max_node_count": {"key": "maxNodeCount", "type": "int"}, + "min_node_count": {"key": "minNodeCount", "type": "int"}, + "node_idle_time_before_scale_down": {"key": "nodeIdleTimeBeforeScaleDown", "type": "duration"}, + } + + def __init__( + self, + *, + max_node_count: int, + min_node_count: int = 0, + node_idle_time_before_scale_down: Optional[datetime.timedelta] = None, + **kwargs + ): + """ + :keyword max_node_count: Max number of nodes to use. Required. + :paramtype max_node_count: int + :keyword min_node_count: Min number of nodes to use. + :paramtype min_node_count: int + :keyword node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This + string needs to be in the RFC Format. 
+ :paramtype node_idle_time_before_scale_down: ~datetime.timedelta + """ + super().__init__(**kwargs) + self.max_node_count = max_node_count + self.min_node_count = min_node_count + self.node_idle_time_before_scale_down = node_idle_time_before_scale_down + + +class ScaleSettingsInformation(_serialization.Model): + """Desired scale settings for the amlCompute. + + :ivar scale_settings: scale settings for AML Compute. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings + """ + + _attribute_map = { + "scale_settings": {"key": "scaleSettings", "type": "ScaleSettings"}, + } + + def __init__(self, *, scale_settings: Optional["_models.ScaleSettings"] = None, **kwargs): + """ + :keyword scale_settings: scale settings for AML Compute. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings + """ + super().__init__(**kwargs) + self.scale_settings = scale_settings + + +class Schedule(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ScheduleProperties """ _validation = { - 'compute_type': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ScheduleProperties"}, } - def __init__( - self, - *, - databricks_access_token: Optional[str] = None, - **kwargs - ): - super(DatabricksComputeSecrets, self).__init__(**kwargs) - self.compute_type = 'Databricks' # type: str - self.databricks_access_token = databricks_access_token + def __init__(self, *, properties: "_models.ScheduleProperties", **kwargs): + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ScheduleProperties + """ + super().__init__(**kwargs) + self.properties = properties -class DatabricksProperties(msrest.serialization.Model): - """DatabricksProperties. +class ScheduleBase(_serialization.Model): + """ScheduleBase. - :param databricks_access_token: Databricks access token. - :type databricks_access_token: str + :ivar id: A system assigned id for the schedule. + :vartype id: str + :ivar provisioning_status: The current deployment state of schedule. 
Known values are: + "Completed", "Provisioning", and "Failed". + :vartype provisioning_status: str or + ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningState + :ivar status: Is the schedule enabled or disabled?. Known values are: "Enabled" and "Disabled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus """ _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "provisioning_status": {"key": "provisioningStatus", "type": "str"}, + "status": {"key": "status", "type": "str"}, } def __init__( self, *, - databricks_access_token: Optional[str] = None, + id: Optional[str] = None, # pylint: disable=redefined-builtin + provisioning_status: Optional[Union[str, "_models.ScheduleProvisioningState"]] = None, + status: Optional[Union[str, "_models.ScheduleStatus"]] = None, **kwargs ): - super(DatabricksProperties, self).__init__(**kwargs) - self.databricks_access_token = databricks_access_token + """ + :keyword id: A system assigned id for the schedule. + :paramtype id: str + :keyword provisioning_status: The current deployment state of schedule. Known values are: + "Completed", "Provisioning", and "Failed". + :paramtype provisioning_status: str or + ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningState + :keyword status: Is the schedule enabled or disabled?. Known values are: "Enabled" and + "Disabled". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus + """ + super().__init__(**kwargs) + self.id = id + self.provisioning_status = provisioning_status + self.status = status -class DataFactory(Compute): - """A DataFactory compute. +class ScheduleProperties(ResourceBase): + """Base definition of a schedule. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar action: [Required] Specifies the action of the schedule. Required. + :vartype action: ~azure.mgmt.machinelearningservices.models.ScheduleActionBase + :ivar display_name: Display name of schedule. + :vartype display_name: str + :ivar is_enabled: Is the schedule enabled?. + :vartype is_enabled: bool + :ivar provisioning_state: Provisioning state for the schedule. Known values are: "Creating", + "Updating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. 
- :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool + ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningStatus + :ivar trigger: [Required] Specifies the trigger details. Required. + :vartype trigger: ~azure.mgmt.machinelearningservices.models.TriggerBase """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "action": {"required": True}, + "provisioning_state": {"readonly": True}, + "trigger": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "action": {"key": "action", "type": "ScheduleActionBase"}, + "display_name": {"key": "displayName", "type": "str"}, + "is_enabled": {"key": "isEnabled", "type": "bool"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "trigger": {"key": "trigger", "type": "TriggerBase"}, } def __init__( self, *, - compute_location: Optional[str] = None, + action: "_models.ScheduleActionBase", + trigger: "_models.TriggerBase", description: Optional[str] = None, - resource_id: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + display_name: Optional[str] = None, + is_enabled: bool = True, **kwargs ): - super(DataFactory, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'DataFactory' # type: str - - -class DataLakeAnalytics(Compute): - """A DataLakeAnalytics compute. + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword action: [Required] Specifies the action of the schedule. Required. + :paramtype action: ~azure.mgmt.machinelearningservices.models.ScheduleActionBase + :keyword display_name: Display name of schedule. 
+ :paramtype display_name: str + :keyword is_enabled: Is the schedule enabled?. + :paramtype is_enabled: bool + :keyword trigger: [Required] Specifies the trigger details. Required. + :paramtype trigger: ~azure.mgmt.machinelearningservices.models.TriggerBase + """ + super().__init__(description=description, properties=properties, tags=tags, **kwargs) + self.action = action + self.display_name = display_name + self.is_enabled = is_enabled + self.provisioning_state = None + self.trigger = trigger - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. +class ScheduleResourceArmPaginatedResult(_serialization.Model): + """A paginated list of Schedule entities. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsProperties + :ivar next_link: The link to the next page of Schedule objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type Schedule. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Schedule] """ - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Schedule]"}, } + def __init__(self, *, next_link: Optional[str] = None, value: Optional[List["_models.Schedule"]] = None, **kwargs): + """ + :keyword next_link: The link to the next page of Schedule objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type Schedule. 
+ :paramtype value: list[~azure.mgmt.machinelearningservices.models.Schedule] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ScriptReference(_serialization.Model): + """Script reference. + + :ivar script_source: The storage source of the script: inline, workspace. + :vartype script_source: str + :ivar script_data: The location of scripts in the mounted volume. + :vartype script_data: str + :ivar script_arguments: Optional command line arguments passed to the script to run. + :vartype script_arguments: str + :ivar timeout: Optional time period passed to timeout command. + :vartype timeout: str + """ + _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'}, + "script_source": {"key": "scriptSource", "type": "str"}, + "script_data": {"key": "scriptData", "type": "str"}, + "script_arguments": {"key": "scriptArguments", "type": "str"}, + "timeout": {"key": "timeout", "type": "str"}, } def __init__( self, *, - compute_location: Optional[str] = None, - description: Optional[str] = None, - resource_id: Optional[str] = None, - properties: Optional["DataLakeAnalyticsProperties"] = None, + script_source: Optional[str] = None, + script_data: Optional[str] = None, + script_arguments: Optional[str] = None, + timeout: Optional[str] = None, **kwargs ): - super(DataLakeAnalytics, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'DataLakeAnalytics' # type: str - self.properties = properties - - -class DataLakeAnalyticsProperties(msrest.serialization.Model): - """DataLakeAnalyticsProperties. - - :param data_lake_store_account_name: DataLake Store Account Name. - :type data_lake_store_account_name: str + """ + :keyword script_source: The storage source of the script: inline, workspace. + :paramtype script_source: str + :keyword script_data: The location of scripts in the mounted volume. + :paramtype script_data: str + :keyword script_arguments: Optional command line arguments passed to the script to run. + :paramtype script_arguments: str + :keyword timeout: Optional time period passed to timeout command. + :paramtype timeout: str + """ + super().__init__(**kwargs) + self.script_source = script_source + self.script_data = script_data + self.script_arguments = script_arguments + self.timeout = timeout + + +class ScriptsToExecute(_serialization.Model): + """Customized setup scripts. + + :ivar startup_script: Script that's run every time the machine starts. + :vartype startup_script: ~azure.mgmt.machinelearningservices.models.ScriptReference + :ivar creation_script: Script that's run only once during provision of the compute. 
+ :vartype creation_script: ~azure.mgmt.machinelearningservices.models.ScriptReference """ _attribute_map = { - 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, + "startup_script": {"key": "startupScript", "type": "ScriptReference"}, + "creation_script": {"key": "creationScript", "type": "ScriptReference"}, } def __init__( self, *, - data_lake_store_account_name: Optional[str] = None, + startup_script: Optional["_models.ScriptReference"] = None, + creation_script: Optional["_models.ScriptReference"] = None, **kwargs ): - super(DataLakeAnalyticsProperties, self).__init__(**kwargs) - self.data_lake_store_account_name = data_lake_store_account_name + """ + :keyword startup_script: Script that's run every time the machine starts. + :paramtype startup_script: ~azure.mgmt.machinelearningservices.models.ScriptReference + :keyword creation_script: Script that's run only once during provision of the compute. + :paramtype creation_script: ~azure.mgmt.machinelearningservices.models.ScriptReference + """ + super().__init__(**kwargs) + self.startup_script = startup_script + self.creation_script = creation_script + + +class ServiceManagedResourcesSettings(_serialization.Model): + """ServiceManagedResourcesSettings. + + :ivar cosmos_db: The settings for the service managed cosmosdb account. + :vartype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings + """ + _attribute_map = { + "cosmos_db": {"key": "cosmosDb", "type": "CosmosDbSettings"}, + } + + def __init__(self, *, cosmos_db: Optional["_models.CosmosDbSettings"] = None, **kwargs): + """ + :keyword cosmos_db: The settings for the service managed cosmosdb account. + :paramtype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings + """ + super().__init__(**kwargs) + self.cosmos_db = cosmos_db -class EncryptionProperty(msrest.serialization.Model): - """EncryptionProperty. + +class ServicePrincipalAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """ServicePrincipalAuthTypeWorkspaceConnectionProperties. All required parameters must be populated in order to send to Azure. - :param status: Required. Indicates whether or not the encryption is enabled for the workspace. - Possible values include: "Enabled", "Disabled". - :type status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus - :param key_vault_properties: Required. Customer Key vault properties. - :type key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", and + "AccessKey". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. + :vartype category: str + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. 
"JSON" + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionServicePrincipal """ _validation = { - 'status': {'required': True}, - 'key_vault_properties': {'required': True}, + "auth_type": {"required": True}, } _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "value": {"key": "value", "type": "str"}, + "value_format": {"key": "valueFormat", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionServicePrincipal"}, } def __init__( self, *, - status: Union[str, "EncryptionStatus"], - key_vault_properties: "KeyVaultProperties", + category: Optional[str] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionServicePrincipal"] = None, **kwargs ): - super(EncryptionProperty, self).__init__(**kwargs) - self.status = status - self.key_vault_properties = key_vault_properties - - -class ErrorDetail(msrest.serialization.Model): - """Error detail information. + """ + :keyword category: Category of the connection. + :paramtype category: str + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. "JSON" + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionServicePrincipal + """ + super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = "ServicePrincipal" # type: str + self.credentials = credentials + + +class ServicePrincipalDatastoreCredentials(DatastoreCredentials): + """Service Principal datastore credentials configuration. All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar authority_url: Authority URL used for authentication. + :vartype authority_url: str + :ivar client_id: [Required] Service principal client ID. Required. + :vartype client_id: str + :ivar resource_url: Resource the service principal has access to. + :vartype resource_url: str + :ivar secrets: [Required] Service principal secrets. Required. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets + :ivar tenant_id: [Required] ID of the tenant to which the service principal belongs. Required. 
+ :vartype tenant_id: str """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + "credentials_type": {"required": True}, + "client_id": {"required": True}, + "secrets": {"required": True}, + "tenant_id": {"required": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "authority_url": {"key": "authorityUrl", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "resource_url": {"key": "resourceUrl", "type": "str"}, + "secrets": {"key": "secrets", "type": "ServicePrincipalDatastoreSecrets"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } def __init__( self, *, - code: str, - message: str, + client_id: str, + secrets: "_models.ServicePrincipalDatastoreSecrets", + tenant_id: str, + authority_url: Optional[str] = None, + resource_url: Optional[str] = None, **kwargs ): - super(ErrorDetail, self).__init__(**kwargs) - self.code = code - self.message = message + """ + :keyword authority_url: Authority URL used for authentication. + :paramtype authority_url: str + :keyword client_id: [Required] Service principal client ID. Required. + :paramtype client_id: str + :keyword resource_url: Resource the service principal has access to. + :paramtype resource_url: str + :keyword secrets: [Required] Service principal secrets. Required. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets + :keyword tenant_id: [Required] ID of the tenant to which the service principal belongs. + Required. + :paramtype tenant_id: str + """ + super().__init__(**kwargs) + self.credentials_type = "ServicePrincipal" # type: str + self.authority_url = authority_url + self.client_id = client_id + self.resource_url = resource_url + self.secrets = secrets + self.tenant_id = tenant_id -class ErrorResponse(msrest.serialization.Model): - """Error response information. +class ServicePrincipalDatastoreSecrets(DatastoreSecrets): + """Datastore Service Principal secrets. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar code: Error code. - :vartype code: str - :ivar message: Error message. - :vartype message: str - :ivar details: An array of error detail objects. - :vartype details: list[~azure.mgmt.machinelearningservices.models.ErrorDetail] + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar client_secret: Service principal secret. 
+ :vartype client_secret: str """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'details': {'readonly': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(ErrorResponse, self).__init__(**kwargs) - self.code = None - self.message = None - self.details = None - + def __init__(self, *, client_secret: Optional[str] = None, **kwargs): + """ + :keyword client_secret: Service principal secret. + :paramtype client_secret: str + """ + super().__init__(**kwargs) + self.secrets_type = "ServicePrincipal" # type: str + self.client_secret = client_secret -class EstimatedVMPrice(msrest.serialization.Model): - """The estimated price info for using a VM of a particular OS type, tier, etc. - All required parameters must be populated in order to send to Azure. +class SetupScripts(_serialization.Model): + """Details of customized scripts to execute for setting up the cluster. - :param retail_price: Required. The price charged for using the VM. - :type retail_price: float - :param os_type: Required. Operating system type used by the VM. Possible values include: - "Linux", "Windows". - :type os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType - :param vm_tier: Required. The type of the VM. Possible values include: "Standard", - "LowPriority", "Spot". - :type vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier + :ivar scripts: Customized setup scripts. + :vartype scripts: ~azure.mgmt.machinelearningservices.models.ScriptsToExecute """ - _validation = { - 'retail_price': {'required': True}, - 'os_type': {'required': True}, - 'vm_tier': {'required': True}, - } - _attribute_map = { - 'retail_price': {'key': 'retailPrice', 'type': 'float'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_tier': {'key': 'vmTier', 'type': 'str'}, + "scripts": {"key": "scripts", "type": "ScriptsToExecute"}, } - def __init__( - self, - *, - retail_price: float, - os_type: Union[str, "VMPriceOSType"], - vm_tier: Union[str, "VMTier"], - **kwargs - ): - super(EstimatedVMPrice, self).__init__(**kwargs) - self.retail_price = retail_price - self.os_type = os_type - self.vm_tier = vm_tier - + def __init__(self, *, scripts: Optional["_models.ScriptsToExecute"] = None, **kwargs): + """ + :keyword scripts: Customized setup scripts. + :paramtype scripts: ~azure.mgmt.machinelearningservices.models.ScriptsToExecute + """ + super().__init__(**kwargs) + self.scripts = scripts -class EstimatedVMPrices(msrest.serialization.Model): - """The estimated price info for using a VM. - All required parameters must be populated in order to send to Azure. +class SharedPrivateLinkResource(_serialization.Model): + """SharedPrivateLinkResource. - :param billing_currency: Required. Three lettered code specifying the currency of the VM price. - Example: USD. Possible values include: "USD". - :type billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency - :param unit_of_measure: Required. The unit of time measurement for the specified VM price. - Example: OneHour. Possible values include: "OneHour". - :type unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure - :param values: Required. 
The list of estimated prices for using a VM of a particular OS type, - tier, etc. - :type values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] + :ivar name: Unique name of the private link. + :vartype name: str + :ivar private_link_resource_id: The resource id that private link links to. + :vartype private_link_resource_id: str + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar request_message: Request message. + :vartype request_message: str + :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", and + "Timeout". + :vartype status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus """ - _validation = { - 'billing_currency': {'required': True}, - 'unit_of_measure': {'required': True}, - 'values': {'required': True}, - } - _attribute_map = { - 'billing_currency': {'key': 'billingCurrency', 'type': 'str'}, - 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[EstimatedVMPrice]'}, + "name": {"key": "name", "type": "str"}, + "private_link_resource_id": {"key": "properties.privateLinkResourceId", "type": "str"}, + "group_id": {"key": "properties.groupId", "type": "str"}, + "request_message": {"key": "properties.requestMessage", "type": "str"}, + "status": {"key": "properties.status", "type": "str"}, } def __init__( self, *, - billing_currency: Union[str, "BillingCurrency"], - unit_of_measure: Union[str, "UnitOfMeasure"], - values: List["EstimatedVMPrice"], + name: Optional[str] = None, + private_link_resource_id: Optional[str] = None, + group_id: Optional[str] = None, + request_message: Optional[str] = None, + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, **kwargs ): - super(EstimatedVMPrices, self).__init__(**kwargs) - self.billing_currency = billing_currency - self.unit_of_measure = unit_of_measure - self.values = values - + """ + :keyword name: Unique name of the private link. + :paramtype name: str + :keyword private_link_resource_id: The resource id that private link links to. + :paramtype private_link_resource_id: str + :keyword group_id: The private link resource group id. + :paramtype group_id: str + :keyword request_message: Request message. + :paramtype request_message: str + :keyword status: Indicates whether the connection has been Approved/Rejected/Removed by the + owner of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", and + "Timeout". + :paramtype status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + """ + super().__init__(**kwargs) + self.name = name + self.private_link_resource_id = private_link_resource_id + self.group_id = group_id + self.request_message = request_message + self.status = status -class HDInsight(Compute): - """A HDInsight compute. - Variables are only populated by the server, and will be ignored when sending a request. +class Sku(_serialization.Model): + """The resource model definition representing SKU. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". 
- :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + :ivar name: The name of the SKU. Ex - P3. It is typically a letter+number code. Required. + :vartype name: str + :ivar tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". + :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + :ivar size: The SKU size. When the name field is the combination of tier and some other value, + this would be the standalone code. + :vartype size: str + :ivar family: If the service has different generations of hardware, for the same SKU, then that + can be captured here. + :vartype family: str + :ivar capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. 
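
An illustrative usage sketch for the SharedPrivateLinkResource model added above (assuming, as its docstring references suggest, that the class is exported from azure.mgmt.machinelearningservices.models); every field value below is a hypothetical placeholder:

from azure.mgmt.machinelearningservices.models import SharedPrivateLinkResource

# All constructor arguments are optional; the ARM id and group id are placeholders.
shared_link = SharedPrivateLinkResource(
    name="example-shared-link",
    private_link_resource_id=(
        "/subscriptions/<sub-id>/resourceGroups/<rg>"
        "/providers/Microsoft.KeyVault/vaults/<vault-name>"
    ),
    group_id="vault",
    request_message="Please approve this connection.",
    status="Pending",  # one of the documented known values
)
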
+ :vartype capacity: int """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "name": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, + "name": {"key": "name", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, + "size": {"key": "size", "type": "str"}, + "family": {"key": "family", "type": "str"}, + "capacity": {"key": "capacity", "type": "int"}, } def __init__( self, *, - compute_location: Optional[str] = None, - description: Optional[str] = None, - resource_id: Optional[str] = None, - properties: Optional["HDInsightProperties"] = None, + name: str, + tier: Optional[Union[str, "_models.SkuTier"]] = None, + size: Optional[str] = None, + family: Optional[str] = None, + capacity: Optional[int] = None, **kwargs ): - super(HDInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'HDInsight' # type: str - self.properties = properties - - -class HDInsightProperties(msrest.serialization.Model): - """HDInsightProperties. - - :param ssh_port: Port open for ssh connections on the master node of the cluster. - :type ssh_port: int - :param address: Public IP address of the master node of the cluster. - :type address: str - :param administrator_account: Admin credentials for master node of the cluster. - :type administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + :keyword name: The name of the SKU. Ex - P3. It is typically a letter+number code. Required. + :paramtype name: str + :keyword tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". + :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + :keyword size: The SKU size. When the name field is the combination of tier and some other + value, this would be the standalone code. + :paramtype size: str + :keyword family: If the service has different generations of hardware, for the same SKU, then + that can be captured here. + :paramtype family: str + :keyword capacity: If the SKU supports scale out/in then the capacity integer should be + included. If scale out/in is not possible for the resource this may be omitted. + :paramtype capacity: int + """ + super().__init__(**kwargs) + self.name = name + self.tier = tier + self.size = size + self.family = family + self.capacity = capacity + + +class SkuCapacity(_serialization.Model): + """SKU capacity information. + + :ivar default: Gets or sets the default capacity. + :vartype default: int + :ivar maximum: Gets or sets the maximum. 
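
A minimal sketch of the Sku model defined above, assuming it is re-exported from azure.mgmt.machinelearningservices.models; only name is required, and the SKU name shown here is a hypothetical placeholder:

from azure.mgmt.machinelearningservices.models import Sku

sku = Sku(
    name="Standard_DS3_v2",  # hypothetical SKU name; the only required field
    tier="Standard",         # one of the documented known values: Free, Basic, Standard, Premium
    capacity=2,              # include only if the SKU supports scale out/in
)
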
+ :vartype maximum: int + :ivar minimum: Gets or sets the minimum. + :vartype minimum: int + :ivar scale_type: Gets or sets the type of the scale. Known values are: "Automatic", "Manual", + and "None". + :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.SkuScaleType """ _attribute_map = { - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + "default": {"key": "default", "type": "int"}, + "maximum": {"key": "maximum", "type": "int"}, + "minimum": {"key": "minimum", "type": "int"}, + "scale_type": {"key": "scaleType", "type": "str"}, } def __init__( self, *, - ssh_port: Optional[int] = None, - address: Optional[str] = None, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, + default: int = 0, + maximum: int = 0, + minimum: int = 0, + scale_type: Optional[Union[str, "_models.SkuScaleType"]] = None, **kwargs ): - super(HDInsightProperties, self).__init__(**kwargs) - self.ssh_port = ssh_port - self.address = address - self.administrator_account = administrator_account - - -class Identity(msrest.serialization.Model): - """Identity for the resource. + """ + :keyword default: Gets or sets the default capacity. + :paramtype default: int + :keyword maximum: Gets or sets the maximum. + :paramtype maximum: int + :keyword minimum: Gets or sets the minimum. + :paramtype minimum: int + :keyword scale_type: Gets or sets the type of the scale. Known values are: "Automatic", + "Manual", and "None". + :paramtype scale_type: str or ~azure.mgmt.machinelearningservices.models.SkuScaleType + """ + super().__init__(**kwargs) + self.default = default + self.maximum = maximum + self.minimum = minimum + self.scale_type = scale_type + + +class SkuResource(_serialization.Model): + """Fulfills ARM Contract requirement to list all available SKUS for a resource. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar principal_id: The principal ID of resource identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of resource. - :vartype tenant_id: str - :param type: Required. The identity type. Possible values include: "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned", "None". - :type type: str or ~azure.mgmt.machinelearningservices.models.ResourceIdentityType - :param user_assigned_identities: The list of user identities associated with resource. The user - identity dictionary key references will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. - :type user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + :ivar capacity: Gets or sets the Sku Capacity. + :vartype capacity: ~azure.mgmt.machinelearningservices.models.SkuCapacity + :ivar resource_type: The resource type name. + :vartype resource_type: str + :ivar sku: Gets or sets the Sku. 
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.SkuSetting """ _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, + "resource_type": {"readonly": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, + "capacity": {"key": "capacity", "type": "SkuCapacity"}, + "resource_type": {"key": "resourceType", "type": "str"}, + "sku": {"key": "sku", "type": "SkuSetting"}, } def __init__( - self, - *, - type: Union[str, "ResourceIdentityType"], - user_assigned_identities: Optional[Dict[str, "ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None, - **kwargs + self, *, capacity: Optional["_models.SkuCapacity"] = None, sku: Optional["_models.SkuSetting"] = None, **kwargs ): - super(Identity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - self.type = type - self.user_assigned_identities = user_assigned_identities + """ + :keyword capacity: Gets or sets the Sku Capacity. + :paramtype capacity: ~azure.mgmt.machinelearningservices.models.SkuCapacity + :keyword sku: Gets or sets the Sku. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.SkuSetting + """ + super().__init__(**kwargs) + self.capacity = capacity + self.resource_type = None + self.sku = sku + + +class SkuResourceArmPaginatedResult(_serialization.Model): + """A paginated list of SkuResource entities. + + :ivar next_link: The link to the next page of SkuResource objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type SkuResource. + :vartype value: list[~azure.mgmt.machinelearningservices.models.SkuResource] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[SkuResource]"}, + } + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.SkuResource"]] = None, **kwargs + ): + """ + :keyword next_link: The link to the next page of SkuResource objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type SkuResource. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.SkuResource] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value -class KeyVaultProperties(msrest.serialization.Model): - """KeyVaultProperties. + +class SkuSetting(_serialization.Model): + """SkuSetting fulfills the need for stripped down SKU info in ARM contract. All required parameters must be populated in order to send to Azure. - :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned - encryption key is present. - :type key_vault_arm_id: str - :param key_identifier: Required. Key vault uri to access the encryption key. - :type key_identifier: str - :param identity_client_id: For future use - The client id of the identity which will be used to - access key vault. - :type identity_client_id: str + :ivar name: [Required] The name of the SKU. Ex - P3. It is typically a letter+number code. + Required. 
+ :vartype name: str + :ivar tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". + :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier """ _validation = { - 'key_vault_arm_id': {'required': True}, - 'key_identifier': {'required': True}, + "name": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, - 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, } - def __init__( - self, - *, - key_vault_arm_id: str, - key_identifier: str, - identity_client_id: Optional[str] = None, - **kwargs - ): - super(KeyVaultProperties, self).__init__(**kwargs) - self.key_vault_arm_id = key_vault_arm_id - self.key_identifier = key_identifier - self.identity_client_id = identity_client_id + def __init__(self, *, name: str, tier: Optional[Union[str, "_models.SkuTier"]] = None, **kwargs): + """ + :keyword name: [Required] The name of the SKU. Ex - P3. It is typically a letter+number code. + Required. + :paramtype name: str + :keyword tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". + :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + """ + super().__init__(**kwargs) + self.name = name + self.tier = tier -class ListAmlUserFeatureResult(msrest.serialization.Model): - """The List Aml user feature operation response. +class SparkJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes + """Spark job definition. Variables are only populated by the server, and will be ignored when sending a request. - :ivar value: The list of AML user facing features. - :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlUserFeature] - :ivar next_link: The URI to fetch the next page of AML user features information. Call - ListNext() with this to fetch the next page of AML user features information. - :vartype next_link: str + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: [Required] Specifies the type of job. Required. 
Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar archives: Archive files used in the job. + :vartype archives: list[str] + :ivar args: Arguments for the job. + :vartype args: str + :ivar code_id: [Required] ARM resource ID of the code asset. Required. + :vartype code_id: str + :ivar conf: Spark configured properties. + :vartype conf: dict[str, str] + :ivar entry: [Required] The entry to execute on startup of the job. Required. + :vartype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry + :ivar environment_id: The ARM resource ID of the Environment specification for the job. + :vartype environment_id: str + :ivar files: Files used in the job. + :vartype files: list[str] + :ivar inputs: Mapping of input data bindings used in the job. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar jars: Jar files used in the job. + :vartype jars: list[str] + :ivar outputs: Mapping of output data bindings used in the job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar py_files: Python files used in the job. + :vartype py_files: list[str] + :ivar resources: Compute Resource configuration for the job. 
+ :vartype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, + "code_id": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + "entry": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "archives": {"key": "archives", "type": "[str]"}, + "args": {"key": "args", "type": "str"}, + "code_id": {"key": "codeId", "type": "str"}, + "conf": {"key": "conf", "type": "{str}"}, + "entry": {"key": "entry", "type": "SparkJobEntry"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "files": {"key": "files", "type": "[str]"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "jars": {"key": "jars", "type": "[str]"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "py_files": {"key": "pyFiles", "type": "[str]"}, + "resources": {"key": "resources", "type": "SparkResourceConfiguration"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, + *, + code_id: str, + entry: "_models.SparkJobEntry", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + archives: Optional[List[str]] = None, + args: Optional[str] = None, + conf: Optional[Dict[str, str]] = None, + environment_id: Optional[str] = None, + files: Optional[List[str]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + jars: Optional[List[str]] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + py_files: Optional[List[str]] = None, + resources: Optional["_models.SparkResourceConfiguration"] = None, **kwargs ): - super(ListAmlUserFeatureResult, self).__init__(**kwargs) - self.value = None - self.next_link = None + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. 
If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword archives: Archive files used in the job. + :paramtype archives: list[str] + :keyword args: Arguments for the job. + :paramtype args: str + :keyword code_id: [Required] ARM resource ID of the code asset. Required. + :paramtype code_id: str + :keyword conf: Spark configured properties. + :paramtype conf: dict[str, str] + :keyword entry: [Required] The entry to execute on startup of the job. Required. + :paramtype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry + :keyword environment_id: The ARM resource ID of the Environment specification for the job. + :paramtype environment_id: str + :keyword files: Files used in the job. + :paramtype files: list[str] + :keyword inputs: Mapping of input data bindings used in the job. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword jars: Jar files used in the job. + :paramtype jars: list[str] + :keyword outputs: Mapping of output data bindings used in the job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword py_files: Python files used in the job. + :paramtype py_files: list[str] + :keyword resources: Compute Resource configuration for the job. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + services=services, + **kwargs + ) + self.job_type = "Spark" # type: str + self.archives = archives + self.args = args + self.code_id = code_id + self.conf = conf + self.entry = entry + self.environment_id = environment_id + self.files = files + self.inputs = inputs + self.jars = jars + self.outputs = outputs + self.py_files = py_files + self.resources = resources + + +class SparkJobEntry(_serialization.Model): + """Spark job entry point definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + SparkJobPythonEntry, SparkJobScalaEntry + All required parameters must be populated in order to send to Azure. -class ListUsagesResult(msrest.serialization.Model): - """The List Usages operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of AML resource usages. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Usage] - :ivar next_link: The URI to fetch the next page of AML resource usage information. Call - ListNext() with this to fetch the next page of AML resource usage information. - :vartype next_link: str + :ivar spark_job_entry_type: [Required] Type of the job's entry point. Required. Known values + are: "SparkJobPythonEntry" and "SparkJobScalaEntry". 
+ :vartype spark_job_entry_type: str or + ~azure.mgmt.machinelearningservices.models.SparkJobEntryType """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "spark_job_entry_type": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[Usage]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(ListUsagesResult, self).__init__(**kwargs) - self.value = None - self.next_link = None + _subtype_map = { + "spark_job_entry_type": { + "SparkJobPythonEntry": "SparkJobPythonEntry", + "SparkJobScalaEntry": "SparkJobScalaEntry", + } + } + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.spark_job_entry_type = None # type: Optional[str] -class ListWorkspaceKeysResult(msrest.serialization.Model): - """ListWorkspaceKeysResult. - Variables are only populated by the server, and will be ignored when sending a request. +class SparkJobPythonEntry(SparkJobEntry): + """SparkJobPythonEntry. - :ivar user_storage_key: - :vartype user_storage_key: str - :ivar user_storage_resource_id: - :vartype user_storage_resource_id: str - :ivar app_insights_instrumentation_key: - :vartype app_insights_instrumentation_key: str - :ivar container_registry_credentials: - :vartype container_registry_credentials: - ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult - :param notebook_access_keys: - :type notebook_access_keys: - ~azure.mgmt.machinelearningservices.models.NotebookListCredentialsResult + All required parameters must be populated in order to send to Azure. + + :ivar spark_job_entry_type: [Required] Type of the job's entry point. Required. Known values + are: "SparkJobPythonEntry" and "SparkJobScalaEntry". + :vartype spark_job_entry_type: str or + ~azure.mgmt.machinelearningservices.models.SparkJobEntryType + :ivar file: [Required] Relative python file path for job entry point. Required. + :vartype file: str """ _validation = { - 'user_storage_key': {'readonly': True}, - 'user_storage_resource_id': {'readonly': True}, - 'app_insights_instrumentation_key': {'readonly': True}, - 'container_registry_credentials': {'readonly': True}, + "spark_job_entry_type": {"required": True}, + "file": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, - 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, - 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, - 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, - 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'NotebookListCredentialsResult'}, + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, + "file": {"key": "file", "type": "str"}, } - def __init__( - self, - *, - notebook_access_keys: Optional["NotebookListCredentialsResult"] = None, - **kwargs - ): - super(ListWorkspaceKeysResult, self).__init__(**kwargs) - self.user_storage_key = None - self.user_storage_resource_id = None - self.app_insights_instrumentation_key = None - self.container_registry_credentials = None - self.notebook_access_keys = notebook_access_keys + def __init__(self, *, file: str, **kwargs): + """ + :keyword file: [Required] Relative python file path for job entry point. Required. 
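
A hedged sketch assembling the SparkJob defined above with a SparkJobPythonEntry entry point (both classes assumed importable from azure.mgmt.machinelearningservices.models); the code asset id, entry file, and Spark conf key are hypothetical placeholders:

from azure.mgmt.machinelearningservices.models import SparkJob, SparkJobPythonEntry

spark_job = SparkJob(
    # [Required] ARM id of a registered code asset (placeholder path).
    code_id=(
        "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.MachineLearningServices"
        "/workspaces/<workspace>/codes/<code-name>/versions/1"
    ),
    # [Required] entry point; SparkJobScalaEntry(class_name=...) is the Scala alternative.
    entry=SparkJobPythonEntry(file="src/main.py"),
    display_name="spark-sample",
    conf={"spark.executor.instances": "2"},  # assumed Spark configuration key
)
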
+ :paramtype file: str + """ + super().__init__(**kwargs) + self.spark_job_entry_type = "SparkJobPythonEntry" # type: str + self.file = file -class ListWorkspaceQuotas(msrest.serialization.Model): - """The List WorkspaceQuotasByVMFamily operation response. +class SparkJobScalaEntry(SparkJobEntry): + """SparkJobScalaEntry. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar value: The list of Workspace Quotas by VM Family. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ResourceQuota] - :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. - Call ListNext() with this to fetch the next page of Workspace Quota information. - :vartype next_link: str + :ivar spark_job_entry_type: [Required] Type of the job's entry point. Required. Known values + are: "SparkJobPythonEntry" and "SparkJobScalaEntry". + :vartype spark_job_entry_type: str or + ~azure.mgmt.machinelearningservices.models.SparkJobEntryType + :ivar class_name: [Required] Scala class name used as entry point. Required. + :vartype class_name: str """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "spark_job_entry_type": {"required": True}, + "class_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[ResourceQuota]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, + "class_name": {"key": "className", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(ListWorkspaceQuotas, self).__init__(**kwargs) - self.value = None - self.next_link = None + def __init__(self, *, class_name: str, **kwargs): + """ + :keyword class_name: [Required] Scala class name used as entry point. Required. + :paramtype class_name: str + """ + super().__init__(**kwargs) + self.spark_job_entry_type = "SparkJobScalaEntry" # type: str + self.class_name = class_name -class MachineLearningServiceError(msrest.serialization.Model): - """Wrapper for error response to follow ARM guidelines. - - Variables are only populated by the server, and will be ignored when sending a request. +class SparkResourceConfiguration(_serialization.Model): + """SparkResourceConfiguration. - :ivar error: The error response. - :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse + :ivar instance_type: Optional type of VM used as supported by the compute target. + :vartype instance_type: str + :ivar runtime_version: Version of spark runtime used for the job. + :vartype runtime_version: str """ - _validation = { - 'error': {'readonly': True}, + _attribute_map = { + "instance_type": {"key": "instanceType", "type": "str"}, + "runtime_version": {"key": "runtimeVersion", "type": "str"}, } + def __init__(self, *, instance_type: Optional[str] = None, runtime_version: str = "3.1", **kwargs): + """ + :keyword instance_type: Optional type of VM used as supported by the compute target. + :paramtype instance_type: str + :keyword runtime_version: Version of spark runtime used for the job. + :paramtype runtime_version: str + """ + super().__init__(**kwargs) + self.instance_type = instance_type + self.runtime_version = runtime_version + + +class SslConfiguration(_serialization.Model): + """The ssl configuration for scoring. + + :ivar status: Enable or disable ssl for scoring. 
Known values are: "Disabled", "Enabled", and + "Auto". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.SslConfigStatus + :ivar cert: Cert data. + :vartype cert: str + :ivar key: Key data. + :vartype key: str + :ivar cname: CNAME of the cert. + :vartype cname: str + :ivar leaf_domain_label: Leaf domain label of public endpoint. + :vartype leaf_domain_label: str + :ivar overwrite_existing_domain: Indicates whether to overwrite existing domain label. + :vartype overwrite_existing_domain: bool + """ + _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorResponse'}, + "status": {"key": "status", "type": "str"}, + "cert": {"key": "cert", "type": "str"}, + "key": {"key": "key", "type": "str"}, + "cname": {"key": "cname", "type": "str"}, + "leaf_domain_label": {"key": "leafDomainLabel", "type": "str"}, + "overwrite_existing_domain": {"key": "overwriteExistingDomain", "type": "bool"}, } def __init__( self, + *, + status: Optional[Union[str, "_models.SslConfigStatus"]] = None, + cert: Optional[str] = None, + key: Optional[str] = None, + cname: Optional[str] = None, + leaf_domain_label: Optional[str] = None, + overwrite_existing_domain: Optional[bool] = None, **kwargs ): - super(MachineLearningServiceError, self).__init__(**kwargs) - self.error = None + """ + :keyword status: Enable or disable ssl for scoring. Known values are: "Disabled", "Enabled", + and "Auto". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.SslConfigStatus + :keyword cert: Cert data. + :paramtype cert: str + :keyword key: Key data. + :paramtype key: str + :keyword cname: CNAME of the cert. + :paramtype cname: str + :keyword leaf_domain_label: Leaf domain label of public endpoint. + :paramtype leaf_domain_label: str + :keyword overwrite_existing_domain: Indicates whether to overwrite existing domain label. + :paramtype overwrite_existing_domain: bool + """ + super().__init__(**kwargs) + self.status = status + self.cert = cert + self.key = key + self.cname = cname + self.leaf_domain_label = leaf_domain_label + self.overwrite_existing_domain = overwrite_existing_domain + + +class StackEnsembleSettings(_serialization.Model): + """Advances setting to customize StackEnsemble run. + + :ivar stack_meta_learner_k_wargs: Optional parameters to pass to the initializer of the + meta-learner. + :vartype stack_meta_learner_k_wargs: JSON + :ivar stack_meta_learner_train_percentage: Specifies the proportion of the training set (when + choosing train and validation type of training) to be reserved for training the meta-learner. + Default value is 0.2. + :vartype stack_meta_learner_train_percentage: float + :ivar stack_meta_learner_type: The meta-learner is a model trained on the output of the + individual heterogeneous models. Known values are: "None", "LogisticRegression", + "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", + "LightGBMRegressor", and "LinearRegression". + :vartype stack_meta_learner_type: str or + ~azure.mgmt.machinelearningservices.models.StackMetaLearnerType + """ + _attribute_map = { + "stack_meta_learner_k_wargs": {"key": "stackMetaLearnerKWargs", "type": "object"}, + "stack_meta_learner_train_percentage": {"key": "stackMetaLearnerTrainPercentage", "type": "float"}, + "stack_meta_learner_type": {"key": "stackMetaLearnerType", "type": "str"}, + } -class NodeStateCounts(msrest.serialization.Model): - """Counts of various compute node states on the amlCompute. 
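
An illustrative SslConfiguration sketch using only fields declared above; every argument is optional, so cert, key, and cname are simply left unset, and the leaf domain label is a placeholder:

from azure.mgmt.machinelearningservices.models import SslConfiguration

ssl_config = SslConfiguration(
    status="Auto",                   # one of the documented known values
    leaf_domain_label="contoso-ml",  # hypothetical label for the public endpoint
    overwrite_existing_domain=True,
)
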
+ def __init__( + self, + *, + stack_meta_learner_k_wargs: Optional[JSON] = None, + stack_meta_learner_train_percentage: float = 0.2, + stack_meta_learner_type: Optional[Union[str, "_models.StackMetaLearnerType"]] = None, + **kwargs + ): + """ + :keyword stack_meta_learner_k_wargs: Optional parameters to pass to the initializer of the + meta-learner. + :paramtype stack_meta_learner_k_wargs: JSON + :keyword stack_meta_learner_train_percentage: Specifies the proportion of the training set + (when choosing train and validation type of training) to be reserved for training the + meta-learner. Default value is 0.2. + :paramtype stack_meta_learner_train_percentage: float + :keyword stack_meta_learner_type: The meta-learner is a model trained on the output of the + individual heterogeneous models. Known values are: "None", "LogisticRegression", + "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", + "LightGBMRegressor", and "LinearRegression". + :paramtype stack_meta_learner_type: str or + ~azure.mgmt.machinelearningservices.models.StackMetaLearnerType + """ + super().__init__(**kwargs) + self.stack_meta_learner_k_wargs = stack_meta_learner_k_wargs + self.stack_meta_learner_train_percentage = stack_meta_learner_train_percentage + self.stack_meta_learner_type = stack_meta_learner_type + + +class StatusMessage(_serialization.Model): + """Active message associated with project. Variables are only populated by the server, and will be ignored when sending a request. - :ivar idle_node_count: Number of compute nodes in idle state. - :vartype idle_node_count: int - :ivar running_node_count: Number of compute nodes which are running jobs. - :vartype running_node_count: int - :ivar preparing_node_count: Number of compute nodes which are being prepared. - :vartype preparing_node_count: int - :ivar unusable_node_count: Number of compute nodes which are in unusable state. - :vartype unusable_node_count: int - :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. - :vartype leaving_node_count: int - :ivar preempted_node_count: Number of compute nodes which are in preempted state. - :vartype preempted_node_count: int + :ivar code: Service-defined message code. + :vartype code: str + :ivar created_date_time: Time in UTC at which the message was created. + :vartype created_date_time: ~datetime.datetime + :ivar level: Severity level of message. Known values are: "Error", "Information", and + "Warning". + :vartype level: str or ~azure.mgmt.machinelearningservices.models.StatusMessageLevel + :ivar message: A human-readable representation of the message code. 
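
A small sketch of StackEnsembleSettings as declared above; the meta-learner kwargs are a hypothetical example of the free-form JSON the field accepts:

from azure.mgmt.machinelearningservices.models import StackEnsembleSettings

stack_settings = StackEnsembleSettings(
    stack_meta_learner_type="LogisticRegressionCV",  # one of the documented known values
    stack_meta_learner_train_percentage=0.2,         # documented default
    stack_meta_learner_k_wargs={"cv": 5},            # hypothetical kwargs forwarded to the meta-learner
)
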
+ :vartype message: str """ _validation = { - 'idle_node_count': {'readonly': True}, - 'running_node_count': {'readonly': True}, - 'preparing_node_count': {'readonly': True}, - 'unusable_node_count': {'readonly': True}, - 'leaving_node_count': {'readonly': True}, - 'preempted_node_count': {'readonly': True}, + "code": {"readonly": True}, + "created_date_time": {"readonly": True}, + "level": {"readonly": True}, + "message": {"readonly": True}, } _attribute_map = { - 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, - 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, - 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, - 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, - 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, - 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + "code": {"key": "code", "type": "str"}, + "created_date_time": {"key": "createdDateTime", "type": "iso-8601"}, + "level": {"key": "level", "type": "str"}, + "message": {"key": "message", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(NodeStateCounts, self).__init__(**kwargs) - self.idle_node_count = None - self.running_node_count = None - self.preparing_node_count = None - self.unusable_node_count = None - self.leaving_node_count = None - self.preempted_node_count = None + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.code = None + self.created_date_time = None + self.level = None + self.message = None -class NotebookListCredentialsResult(msrest.serialization.Model): - """NotebookListCredentialsResult. +class StorageAccountDetails(_serialization.Model): + """Details of storage account to be used for the Registry. - :param primary_access_key: - :type primary_access_key: str - :param secondary_access_key: - :type secondary_access_key: str + :ivar system_created_storage_account: + :vartype system_created_storage_account: + ~azure.mgmt.machinelearningservices.models.SystemCreatedStorageAccount + :ivar user_created_storage_account: + :vartype user_created_storage_account: + ~azure.mgmt.machinelearningservices.models.UserCreatedStorageAccount """ _attribute_map = { - 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, - 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, + "system_created_storage_account": {"key": "systemCreatedStorageAccount", "type": "SystemCreatedStorageAccount"}, + "user_created_storage_account": {"key": "userCreatedStorageAccount", "type": "UserCreatedStorageAccount"}, } def __init__( self, *, - primary_access_key: Optional[str] = None, - secondary_access_key: Optional[str] = None, + system_created_storage_account: Optional["_models.SystemCreatedStorageAccount"] = None, + user_created_storage_account: Optional["_models.UserCreatedStorageAccount"] = None, **kwargs ): - super(NotebookListCredentialsResult, self).__init__(**kwargs) - self.primary_access_key = primary_access_key - self.secondary_access_key = secondary_access_key + """ + :keyword system_created_storage_account: + :paramtype system_created_storage_account: + ~azure.mgmt.machinelearningservices.models.SystemCreatedStorageAccount + :keyword user_created_storage_account: + :paramtype user_created_storage_account: + ~azure.mgmt.machinelearningservices.models.UserCreatedStorageAccount + """ + super().__init__(**kwargs) + self.system_created_storage_account = system_created_storage_account + self.user_created_storage_account = user_created_storage_account + + +class 
SweepJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes + """Sweep job definition. + Variables are only populated by the server, and will be ignored when sending a request. -class NotebookPreparationError(msrest.serialization.Model): - """NotebookPreparationError. + All required parameters must be populated in order to send to Azure. - :param error_message: - :type error_message: str - :param status_code: - :type status_code: int + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar early_termination: Early termination policies enable canceling poor-performing runs + before they complete. + :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :ivar inputs: Mapping of input data bindings used in the job. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar limits: Sweep Job limit. + :vartype limits: ~azure.mgmt.machinelearningservices.models.SweepJobLimits + :ivar objective: [Required] Optimization objective. Required. + :vartype objective: ~azure.mgmt.machinelearningservices.models.Objective + :ivar outputs: Mapping of output data bindings used in the job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar sampling_algorithm: [Required] The hyperparameter sampling algorithm. Required. + :vartype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm + :ivar search_space: [Required] A dictionary containing each parameter and its distribution. The + dictionary key is the name of the parameter. Required. + :vartype search_space: JSON + :ivar trial: [Required] Trial component definition. Required. 
+ :vartype trial: ~azure.mgmt.machinelearningservices.models.TrialComponent """ + _validation = { + "job_type": {"required": True}, + "status": {"readonly": True}, + "objective": {"required": True}, + "sampling_algorithm": {"required": True}, + "search_space": {"required": True}, + "trial": {"required": True}, + } + _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'status_code': {'key': 'statusCode', 'type': 'int'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "limits": {"key": "limits", "type": "SweepJobLimits"}, + "objective": {"key": "objective", "type": "Objective"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "SamplingAlgorithm"}, + "search_space": {"key": "searchSpace", "type": "object"}, + "trial": {"key": "trial", "type": "TrialComponent"}, } def __init__( self, *, - error_message: Optional[str] = None, - status_code: Optional[int] = None, + objective: "_models.Objective", + sampling_algorithm: "_models.SamplingAlgorithm", + search_space: JSON, + trial: "_models.TrialComponent", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + limits: Optional["_models.SweepJobLimits"] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, **kwargs ): - super(NotebookPreparationError, self).__init__(**kwargs) - self.error_message = error_message - self.status_code = status_code + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. 
+ Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword early_termination: Early termination policies enable canceling poor-performing runs + before they complete. + :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :keyword inputs: Mapping of input data bindings used in the job. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword limits: Sweep Job limit. + :paramtype limits: ~azure.mgmt.machinelearningservices.models.SweepJobLimits + :keyword objective: [Required] Optimization objective. Required. + :paramtype objective: ~azure.mgmt.machinelearningservices.models.Objective + :keyword outputs: Mapping of output data bindings used in the job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword sampling_algorithm: [Required] The hyperparameter sampling algorithm. Required. + :paramtype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm + :keyword search_space: [Required] A dictionary containing each parameter and its distribution. + The dictionary key is the name of the parameter. Required. + :paramtype search_space: JSON + :keyword trial: [Required] Trial component definition. Required. + :paramtype trial: ~azure.mgmt.machinelearningservices.models.TrialComponent + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + services=services, + **kwargs + ) + self.job_type = "Sweep" # type: str + self.early_termination = early_termination + self.inputs = inputs + self.limits = limits + self.objective = objective + self.outputs = outputs + self.sampling_algorithm = sampling_algorithm + self.search_space = search_space + self.trial = trial + + +class SweepJobLimits(JobLimits): + """Sweep Job limit class. + All required parameters must be populated in order to send to Azure. -class NotebookResourceInfo(msrest.serialization.Model): - """NotebookResourceInfo. - - :param fqdn: - :type fqdn: str - :param resource_id: the data plane resourceId that used to initialize notebook component. - :type resource_id: str - :param notebook_preparation_error: The error that occurs when preparing notebook. - :type notebook_preparation_error: - ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + :ivar job_limits_type: [Required] JobLimit type. Required. Known values are: "Command" and + "Sweep". + :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType + :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. + Only supports duration with precision as low as Seconds. + :vartype timeout: ~datetime.timedelta + :ivar max_concurrent_trials: Sweep Job max concurrent trials. + :vartype max_concurrent_trials: int + :ivar max_total_trials: Sweep Job max total trials. + :vartype max_total_trials: int + :ivar trial_timeout: Sweep Job Trial timeout value. 
+ :vartype trial_timeout: ~datetime.timedelta """ + _validation = { + "job_limits_type": {"required": True}, + } + _attribute_map = { - 'fqdn': {'key': 'fqdn', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, + "job_limits_type": {"key": "jobLimitsType", "type": "str"}, + "timeout": {"key": "timeout", "type": "duration"}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_total_trials": {"key": "maxTotalTrials", "type": "int"}, + "trial_timeout": {"key": "trialTimeout", "type": "duration"}, } def __init__( self, *, - fqdn: Optional[str] = None, - resource_id: Optional[str] = None, - notebook_preparation_error: Optional["NotebookPreparationError"] = None, + timeout: Optional[datetime.timedelta] = None, + max_concurrent_trials: Optional[int] = None, + max_total_trials: Optional[int] = None, + trial_timeout: Optional[datetime.timedelta] = None, **kwargs ): - super(NotebookResourceInfo, self).__init__(**kwargs) - self.fqdn = fqdn - self.resource_id = resource_id - self.notebook_preparation_error = notebook_preparation_error + """ + :keyword timeout: The max run duration in ISO 8601 format, after which the job will be + cancelled. Only supports duration with precision as low as Seconds. + :paramtype timeout: ~datetime.timedelta + :keyword max_concurrent_trials: Sweep Job max concurrent trials. + :paramtype max_concurrent_trials: int + :keyword max_total_trials: Sweep Job max total trials. + :paramtype max_total_trials: int + :keyword trial_timeout: Sweep Job Trial timeout value. + :paramtype trial_timeout: ~datetime.timedelta + """ + super().__init__(timeout=timeout, **kwargs) + self.job_limits_type = "Sweep" # type: str + self.max_concurrent_trials = max_concurrent_trials + self.max_total_trials = max_total_trials + self.trial_timeout = trial_timeout + + +class SynapseSpark(Compute): # pylint: disable=too-many-instance-attributes + """A SynapseSpark compute. + Variables are only populated by the server, and will be ignored when sending a request. -class Operation(msrest.serialization.Model): - """Azure Machine Learning workspace REST API operation. + All required parameters must be populated in order to send to Azure. - :param name: Operation name: {provider}/{resource}/{operation}. - :type name: str - :param display: Display name of operation. - :type display: ~azure.mgmt.machinelearningservices.models.OperationDisplay + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. 
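
A sketch of SweepJobLimits as defined above; the constructor fixes job_limits_type to "Sweep", and the timeout fields take datetime.timedelta values that serialize as ISO 8601 durations:

import datetime

from azure.mgmt.machinelearningservices.models import SweepJobLimits

limits = SweepJobLimits(
    max_total_trials=20,
    max_concurrent_trials=4,
    timeout=datetime.timedelta(hours=2),           # overall job timeout
    trial_timeout=datetime.timedelta(minutes=30),  # per-trial timeout
)
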
+ :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. + :vartype disable_local_auth: bool + :ivar properties: + :vartype properties: ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties """ + _validation = { + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + "properties": {"key": "properties", "type": "SynapseSparkProperties"}, } def __init__( self, *, - name: Optional[str] = None, - display: Optional["OperationDisplay"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + properties: Optional["_models.SynapseSparkProperties"] = None, **kwargs ): - super(Operation, self).__init__(**kwargs) - self.name = name - self.display = display - + """ + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword properties: + :paramtype properties: ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties + """ + super().__init__(description=description, resource_id=resource_id, **kwargs) + self.compute_type = "SynapseSpark" # type: str + self.properties = properties -class OperationDisplay(msrest.serialization.Model): - """Display name of operation. - :param provider: The resource provider name: Microsoft.MachineLearningExperimentation. - :type provider: str - :param resource: The resource on which the operation is performed. - :type resource: str - :param operation: The operation that users can perform. - :type operation: str - :param description: The description for the operation. - :type description: str +class SynapseSparkProperties(_serialization.Model): + """SynapseSparkProperties. + + :ivar auto_scale_properties: Auto scale properties. + :vartype auto_scale_properties: ~azure.mgmt.machinelearningservices.models.AutoScaleProperties + :ivar auto_pause_properties: Auto pause properties. 
+ :vartype auto_pause_properties: ~azure.mgmt.machinelearningservices.models.AutoPauseProperties + :ivar spark_version: Spark version. + :vartype spark_version: str + :ivar node_count: The number of compute nodes currently assigned to the compute. + :vartype node_count: int + :ivar node_size: Node size. + :vartype node_size: str + :ivar node_size_family: Node size family. + :vartype node_size_family: str + :ivar subscription_id: Azure subscription identifier. + :vartype subscription_id: str + :ivar resource_group: Name of the resource group in which workspace is located. + :vartype resource_group: str + :ivar workspace_name: Name of Azure Machine Learning workspace. + :vartype workspace_name: str + :ivar pool_name: Pool name. + :vartype pool_name: str """ _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + "auto_scale_properties": {"key": "autoScaleProperties", "type": "AutoScaleProperties"}, + "auto_pause_properties": {"key": "autoPauseProperties", "type": "AutoPauseProperties"}, + "spark_version": {"key": "sparkVersion", "type": "str"}, + "node_count": {"key": "nodeCount", "type": "int"}, + "node_size": {"key": "nodeSize", "type": "str"}, + "node_size_family": {"key": "nodeSizeFamily", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "workspace_name": {"key": "workspaceName", "type": "str"}, + "pool_name": {"key": "poolName", "type": "str"}, } def __init__( self, *, - provider: Optional[str] = None, - resource: Optional[str] = None, - operation: Optional[str] = None, - description: Optional[str] = None, + auto_scale_properties: Optional["_models.AutoScaleProperties"] = None, + auto_pause_properties: Optional["_models.AutoPauseProperties"] = None, + spark_version: Optional[str] = None, + node_count: Optional[int] = None, + node_size: Optional[str] = None, + node_size_family: Optional[str] = None, + subscription_id: Optional[str] = None, + resource_group: Optional[str] = None, + workspace_name: Optional[str] = None, + pool_name: Optional[str] = None, **kwargs ): - super(OperationDisplay, self).__init__(**kwargs) - self.provider = provider - self.resource = resource - self.operation = operation - self.description = description - - -class OperationListResult(msrest.serialization.Model): - """An array of operations supported by the resource provider. - - :param value: List of AML workspace operations supported by the AML workspace resource - provider. - :type value: list[~azure.mgmt.machinelearningservices.models.Operation] + """ + :keyword auto_scale_properties: Auto scale properties. + :paramtype auto_scale_properties: + ~azure.mgmt.machinelearningservices.models.AutoScaleProperties + :keyword auto_pause_properties: Auto pause properties. + :paramtype auto_pause_properties: + ~azure.mgmt.machinelearningservices.models.AutoPauseProperties + :keyword spark_version: Spark version. + :paramtype spark_version: str + :keyword node_count: The number of compute nodes currently assigned to the compute. + :paramtype node_count: int + :keyword node_size: Node size. + :paramtype node_size: str + :keyword node_size_family: Node size family. + :paramtype node_size_family: str + :keyword subscription_id: Azure subscription identifier. 
+ :paramtype subscription_id: str + :keyword resource_group: Name of the resource group in which workspace is located. + :paramtype resource_group: str + :keyword workspace_name: Name of Azure Machine Learning workspace. + :paramtype workspace_name: str + :keyword pool_name: Pool name. + :paramtype pool_name: str + """ + super().__init__(**kwargs) + self.auto_scale_properties = auto_scale_properties + self.auto_pause_properties = auto_pause_properties + self.spark_version = spark_version + self.node_count = node_count + self.node_size = node_size + self.node_size_family = node_size_family + self.subscription_id = subscription_id + self.resource_group = resource_group + self.workspace_name = workspace_name + self.pool_name = pool_name + + +class SystemCreatedAcrAccount(_serialization.Model): + """SystemCreatedAcrAccount. + + :ivar acr_account_sku: + :vartype acr_account_sku: str + :ivar arm_resource_id: ARM ResourceId of a resource. + :vartype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId """ _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, + "acr_account_sku": {"key": "acrAccountSku", "type": "str"}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, } def __init__( self, *, - value: Optional[List["Operation"]] = None, + acr_account_sku: Optional[str] = None, + arm_resource_id: Optional["_models.ArmResourceId"] = None, **kwargs ): - super(OperationListResult, self).__init__(**kwargs) - self.value = value - - -class PaginatedComputeResourcesList(msrest.serialization.Model): - """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. - - :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope. - :type value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] - :param next_link: A continuation link (absolute URI) to the next page of results in the list. - :type next_link: str + """ + :keyword acr_account_sku: + :paramtype acr_account_sku: str + :keyword arm_resource_id: ARM ResourceId of a resource. + :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId + """ + super().__init__(**kwargs) + self.acr_account_sku = acr_account_sku + self.arm_resource_id = arm_resource_id + + +class SystemCreatedStorageAccount(_serialization.Model): + """SystemCreatedStorageAccount. + + :ivar arm_resource_id: ARM ResourceId of a resource. + :vartype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId + :ivar storage_account_hns_enabled: + :vartype storage_account_hns_enabled: bool + :ivar storage_account_type: Allowed values: + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Standard_GZRS", + "Standard_RAGZRS", + "Premium_LRS", + "Premium_ZRS". 
+ :vartype storage_account_type: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[ComputeResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, + "storage_account_hns_enabled": {"key": "storageAccountHnsEnabled", "type": "bool"}, + "storage_account_type": {"key": "storageAccountType", "type": "str"}, } def __init__( self, *, - value: Optional[List["ComputeResource"]] = None, - next_link: Optional[str] = None, + arm_resource_id: Optional["_models.ArmResourceId"] = None, + storage_account_hns_enabled: Optional[bool] = None, + storage_account_type: Optional[str] = None, **kwargs ): - super(PaginatedComputeResourcesList, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class PaginatedWorkspaceConnectionsList(msrest.serialization.Model): - """Paginated list of Workspace connection objects. - - :param value: An array of Workspace connection objects. - :type value: list[~azure.mgmt.machinelearningservices.models.WorkspaceConnection] - :param next_link: A continuation link (absolute URI) to the next page of results in the list. - :type next_link: str + """ + :keyword arm_resource_id: ARM ResourceId of a resource. + :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId + :keyword storage_account_hns_enabled: + :paramtype storage_account_hns_enabled: bool + :keyword storage_account_type: Allowed values: + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Standard_GZRS", + "Standard_RAGZRS", + "Premium_LRS", + "Premium_ZRS". + :paramtype storage_account_type: str + """ + super().__init__(**kwargs) + self.arm_resource_id = arm_resource_id + self.storage_account_hns_enabled = storage_account_hns_enabled + self.storage_account_type = storage_account_type + + +class SystemData(_serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). 
+ :vartype last_modified_at: ~datetime.datetime """ _attribute_map = { - 'value': {'key': 'value', 'type': '[WorkspaceConnection]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "created_by": {"key": "createdBy", "type": "str"}, + "created_by_type": {"key": "createdByType", "type": "str"}, + "created_at": {"key": "createdAt", "type": "iso-8601"}, + "last_modified_by": {"key": "lastModifiedBy", "type": "str"}, + "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"}, + "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"}, } def __init__( self, *, - value: Optional[List["WorkspaceConnection"]] = None, - next_link: Optional[str] = None, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, **kwargs ): - super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class Password(msrest.serialization.Model): - """Password. + """ + :keyword created_by: The identity that created the resource. + :paramtype created_by: str + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :paramtype created_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType + :keyword created_at: The timestamp of resource creation (UTC). + :paramtype created_at: ~datetime.datetime + :keyword last_modified_by: The identity that last modified the resource. + :paramtype last_modified_by: str + :keyword last_modified_by_type: The type of identity that last modified the resource. Known + values are: "User", "Application", "ManagedIdentity", and "Key". + :paramtype last_modified_by_type: str or + ~azure.mgmt.machinelearningservices.models.CreatedByType + :keyword last_modified_at: The timestamp of resource last modification (UTC). + :paramtype last_modified_at: ~datetime.datetime + """ + super().__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at + + +class SystemService(_serialization.Model): + """A system service running on a compute. Variables are only populated by the server, and will be ignored when sending a request. - :ivar name: - :vartype name: str - :ivar value: - :vartype value: str + :ivar system_service_type: The type of this system service. + :vartype system_service_type: str + :ivar public_ip_address: Public IP address. + :vartype public_ip_address: str + :ivar version: The version for this type. 
+ :vartype version: str """ _validation = { - 'name': {'readonly': True}, - 'value': {'readonly': True}, + "system_service_type": {"readonly": True}, + "public_ip_address": {"readonly": True}, + "version": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "system_service_type": {"key": "systemServiceType", "type": "str"}, + "public_ip_address": {"key": "publicIpAddress", "type": "str"}, + "version": {"key": "version", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(Password, self).__init__(**kwargs) - self.name = None - self.value = None - - -class PrivateEndpoint(msrest.serialization.Model): - """The Private Endpoint resource. + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.system_service_type = None + self.public_ip_address = None + self.version = None - Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The ARM identifier for Private Endpoint. - :vartype id: str +class TableFixedParameters(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Fixed training parameters that won't be swept over during AutoML Table training. + + :ivar booster: Specify the boosting type, e.g gbdt for XGBoost. + :vartype booster: str + :ivar boosting_type: Specify the boosting type, e.g gbdt for LightGBM. + :vartype boosting_type: str + :ivar grow_policy: Specify the grow policy, which controls the way new nodes are added to the + tree. + :vartype grow_policy: str + :ivar learning_rate: The learning rate for the training procedure. + :vartype learning_rate: float + :ivar max_bin: Specify the Maximum number of discrete bins to bucket continuous features . + :vartype max_bin: int + :ivar max_depth: Specify the max depth to limit the tree depth explicitly. + :vartype max_depth: int + :ivar max_leaves: Specify the max leaves to limit the tree leaves explicitly. + :vartype max_leaves: int + :ivar min_data_in_leaf: The minimum number of data per leaf. + :vartype min_data_in_leaf: int + :ivar min_split_gain: Minimum loss reduction required to make a further partition on a leaf + node of the tree. + :vartype min_split_gain: float + :ivar model_name: The name of the model to train. + :vartype model_name: str + :ivar n_estimators: Specify the number of trees (or rounds) in an model. + :vartype n_estimators: int + :ivar num_leaves: Specify the number of leaves. + :vartype num_leaves: int + :ivar preprocessor_name: The name of the preprocessor to use. + :vartype preprocessor_name: str + :ivar reg_alpha: L1 regularization term on weights. + :vartype reg_alpha: float + :ivar reg_lambda: L2 regularization term on weights. + :vartype reg_lambda: float + :ivar subsample: Subsample ratio of the training instance. + :vartype subsample: float + :ivar subsample_freq: Frequency of subsample. + :vartype subsample_freq: float + :ivar tree_method: Specify the tree method. + :vartype tree_method: str + :ivar with_mean: If true, center before scaling the data with StandardScalar. + :vartype with_mean: bool + :ivar with_std: If true, scaling the data with Unit Variance with StandardScalar. 
+ :vartype with_std: bool """ - _validation = { - 'id': {'readonly': True}, - } - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "booster": {"key": "booster", "type": "str"}, + "boosting_type": {"key": "boostingType", "type": "str"}, + "grow_policy": {"key": "growPolicy", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "max_bin": {"key": "maxBin", "type": "int"}, + "max_depth": {"key": "maxDepth", "type": "int"}, + "max_leaves": {"key": "maxLeaves", "type": "int"}, + "min_data_in_leaf": {"key": "minDataInLeaf", "type": "int"}, + "min_split_gain": {"key": "minSplitGain", "type": "float"}, + "model_name": {"key": "modelName", "type": "str"}, + "n_estimators": {"key": "nEstimators", "type": "int"}, + "num_leaves": {"key": "numLeaves", "type": "int"}, + "preprocessor_name": {"key": "preprocessorName", "type": "str"}, + "reg_alpha": {"key": "regAlpha", "type": "float"}, + "reg_lambda": {"key": "regLambda", "type": "float"}, + "subsample": {"key": "subsample", "type": "float"}, + "subsample_freq": {"key": "subsampleFreq", "type": "float"}, + "tree_method": {"key": "treeMethod", "type": "str"}, + "with_mean": {"key": "withMean", "type": "bool"}, + "with_std": {"key": "withStd", "type": "bool"}, } def __init__( self, + *, + booster: Optional[str] = None, + boosting_type: Optional[str] = None, + grow_policy: Optional[str] = None, + learning_rate: Optional[float] = None, + max_bin: Optional[int] = None, + max_depth: Optional[int] = None, + max_leaves: Optional[int] = None, + min_data_in_leaf: Optional[int] = None, + min_split_gain: Optional[float] = None, + model_name: Optional[str] = None, + n_estimators: Optional[int] = None, + num_leaves: Optional[int] = None, + preprocessor_name: Optional[str] = None, + reg_alpha: Optional[float] = None, + reg_lambda: Optional[float] = None, + subsample: Optional[float] = None, + subsample_freq: Optional[float] = None, + tree_method: Optional[str] = None, + with_mean: bool = False, + with_std: bool = False, **kwargs ): - super(PrivateEndpoint, self).__init__(**kwargs) - self.id = None - - -class PrivateEndpointConnection(msrest.serialization.Model): - """The Private Endpoint Connection resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: ResourceId of the private endpoint connection. - :vartype id: str - :ivar name: Friendly name of the private endpoint connection. - :vartype name: str - :ivar type: Resource type of private endpoint connection. - :vartype type: str - :param private_endpoint: The resource of private end point. - :type private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint - :param private_link_service_connection_state: A collection of information about the state of - the connection between service consumer and provider. - :type private_link_service_connection_state: - ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: The provisioning state of the private endpoint connection resource. - Possible values include: "Succeeded", "Creating", "Deleting", "Failed". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState + """ + :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. + :paramtype booster: str + :keyword boosting_type: Specify the boosting type, e.g gbdt for LightGBM. 
+ :paramtype boosting_type: str + :keyword grow_policy: Specify the grow policy, which controls the way new nodes are added to + the tree. + :paramtype grow_policy: str + :keyword learning_rate: The learning rate for the training procedure. + :paramtype learning_rate: float + :keyword max_bin: Specify the Maximum number of discrete bins to bucket continuous features . + :paramtype max_bin: int + :keyword max_depth: Specify the max depth to limit the tree depth explicitly. + :paramtype max_depth: int + :keyword max_leaves: Specify the max leaves to limit the tree leaves explicitly. + :paramtype max_leaves: int + :keyword min_data_in_leaf: The minimum number of data per leaf. + :paramtype min_data_in_leaf: int + :keyword min_split_gain: Minimum loss reduction required to make a further partition on a leaf + node of the tree. + :paramtype min_split_gain: float + :keyword model_name: The name of the model to train. + :paramtype model_name: str + :keyword n_estimators: Specify the number of trees (or rounds) in an model. + :paramtype n_estimators: int + :keyword num_leaves: Specify the number of leaves. + :paramtype num_leaves: int + :keyword preprocessor_name: The name of the preprocessor to use. + :paramtype preprocessor_name: str + :keyword reg_alpha: L1 regularization term on weights. + :paramtype reg_alpha: float + :keyword reg_lambda: L2 regularization term on weights. + :paramtype reg_lambda: float + :keyword subsample: Subsample ratio of the training instance. + :paramtype subsample: float + :keyword subsample_freq: Frequency of subsample. + :paramtype subsample_freq: float + :keyword tree_method: Specify the tree method. + :paramtype tree_method: str + :keyword with_mean: If true, center before scaling the data with StandardScalar. + :paramtype with_mean: bool + :keyword with_std: If true, scaling the data with Unit Variance with StandardScalar. + :paramtype with_std: bool + """ + super().__init__(**kwargs) + self.booster = booster + self.boosting_type = boosting_type + self.grow_policy = grow_policy + self.learning_rate = learning_rate + self.max_bin = max_bin + self.max_depth = max_depth + self.max_leaves = max_leaves + self.min_data_in_leaf = min_data_in_leaf + self.min_split_gain = min_split_gain + self.model_name = model_name + self.n_estimators = n_estimators + self.num_leaves = num_leaves + self.preprocessor_name = preprocessor_name + self.reg_alpha = reg_alpha + self.reg_lambda = reg_lambda + self.subsample = subsample + self.subsample_freq = subsample_freq + self.tree_method = tree_method + self.with_mean = with_mean + self.with_std = with_std + + +class TableParameterSubspace(_serialization.Model): # pylint: disable=too-many-instance-attributes + """TableParameterSubspace. + + :ivar booster: Specify the boosting type, e.g gbdt for XGBoost. + :vartype booster: str + :ivar boosting_type: Specify the boosting type, e.g gbdt for LightGBM. + :vartype boosting_type: str + :ivar grow_policy: Specify the grow policy, which controls the way new nodes are added to the + tree. + :vartype grow_policy: str + :ivar learning_rate: The learning rate for the training procedure. + :vartype learning_rate: str + :ivar max_bin: Specify the Maximum number of discrete bins to bucket continuous features . + :vartype max_bin: str + :ivar max_depth: Specify the max depth to limit the tree depth explicitly. + :vartype max_depth: str + :ivar max_leaves: Specify the max leaves to limit the tree leaves explicitly. + :vartype max_leaves: str + :ivar min_data_in_leaf: The minimum number of data per leaf. 
+ :vartype min_data_in_leaf: str + :ivar min_split_gain: Minimum loss reduction required to make a further partition on a leaf + node of the tree. + :vartype min_split_gain: str + :ivar model_name: The name of the model to train. + :vartype model_name: str + :ivar n_estimators: Specify the number of trees (or rounds) in an model. + :vartype n_estimators: str + :ivar num_leaves: Specify the number of leaves. + :vartype num_leaves: str + :ivar preprocessor_name: The name of the preprocessor to use. + :vartype preprocessor_name: str + :ivar reg_alpha: L1 regularization term on weights. + :vartype reg_alpha: str + :ivar reg_lambda: L2 regularization term on weights. + :vartype reg_lambda: str + :ivar subsample: Subsample ratio of the training instance. + :vartype subsample: str + :ivar subsample_freq: Frequency of subsample. + :vartype subsample_freq: str + :ivar tree_method: Specify the tree method. + :vartype tree_method: str + :ivar with_mean: If true, center before scaling the data with StandardScalar. + :vartype with_mean: str + :ivar with_std: If true, scaling the data with Unit Variance with StandardScalar. + :vartype with_std: str """ - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, - 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + "booster": {"key": "booster", "type": "str"}, + "boosting_type": {"key": "boostingType", "type": "str"}, + "grow_policy": {"key": "growPolicy", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "max_bin": {"key": "maxBin", "type": "str"}, + "max_depth": {"key": "maxDepth", "type": "str"}, + "max_leaves": {"key": "maxLeaves", "type": "str"}, + "min_data_in_leaf": {"key": "minDataInLeaf", "type": "str"}, + "min_split_gain": {"key": "minSplitGain", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "n_estimators": {"key": "nEstimators", "type": "str"}, + "num_leaves": {"key": "numLeaves", "type": "str"}, + "preprocessor_name": {"key": "preprocessorName", "type": "str"}, + "reg_alpha": {"key": "regAlpha", "type": "str"}, + "reg_lambda": {"key": "regLambda", "type": "str"}, + "subsample": {"key": "subsample", "type": "str"}, + "subsample_freq": {"key": "subsampleFreq", "type": "str"}, + "tree_method": {"key": "treeMethod", "type": "str"}, + "with_mean": {"key": "withMean", "type": "str"}, + "with_std": {"key": "withStd", "type": "str"}, } def __init__( self, *, - private_endpoint: Optional["PrivateEndpoint"] = None, - private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None, + booster: Optional[str] = None, + boosting_type: Optional[str] = None, + grow_policy: Optional[str] = None, + learning_rate: Optional[str] = None, + max_bin: Optional[str] = None, + max_depth: Optional[str] = None, + max_leaves: Optional[str] = None, + min_data_in_leaf: Optional[str] = None, + min_split_gain: Optional[str] = None, + model_name: Optional[str] = None, + n_estimators: Optional[str] = None, + num_leaves: Optional[str] = None, + preprocessor_name: Optional[str] = None, + reg_alpha: Optional[str] 
= None, + reg_lambda: Optional[str] = None, + subsample: Optional[str] = None, + subsample_freq: Optional[str] = None, + tree_method: Optional[str] = None, + with_mean: Optional[str] = None, + with_std: Optional[str] = None, **kwargs ): - super(PrivateEndpointConnection, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.private_endpoint = private_endpoint - self.private_link_service_connection_state = private_link_service_connection_state - self.provisioning_state = None - + """ + :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. + :paramtype booster: str + :keyword boosting_type: Specify the boosting type, e.g gbdt for LightGBM. + :paramtype boosting_type: str + :keyword grow_policy: Specify the grow policy, which controls the way new nodes are added to + the tree. + :paramtype grow_policy: str + :keyword learning_rate: The learning rate for the training procedure. + :paramtype learning_rate: str + :keyword max_bin: Specify the Maximum number of discrete bins to bucket continuous features . + :paramtype max_bin: str + :keyword max_depth: Specify the max depth to limit the tree depth explicitly. + :paramtype max_depth: str + :keyword max_leaves: Specify the max leaves to limit the tree leaves explicitly. + :paramtype max_leaves: str + :keyword min_data_in_leaf: The minimum number of data per leaf. + :paramtype min_data_in_leaf: str + :keyword min_split_gain: Minimum loss reduction required to make a further partition on a leaf + node of the tree. + :paramtype min_split_gain: str + :keyword model_name: The name of the model to train. + :paramtype model_name: str + :keyword n_estimators: Specify the number of trees (or rounds) in an model. + :paramtype n_estimators: str + :keyword num_leaves: Specify the number of leaves. + :paramtype num_leaves: str + :keyword preprocessor_name: The name of the preprocessor to use. + :paramtype preprocessor_name: str + :keyword reg_alpha: L1 regularization term on weights. + :paramtype reg_alpha: str + :keyword reg_lambda: L2 regularization term on weights. + :paramtype reg_lambda: str + :keyword subsample: Subsample ratio of the training instance. + :paramtype subsample: str + :keyword subsample_freq: Frequency of subsample. + :paramtype subsample_freq: str + :keyword tree_method: Specify the tree method. + :paramtype tree_method: str + :keyword with_mean: If true, center before scaling the data with StandardScalar. + :paramtype with_mean: str + :keyword with_std: If true, scaling the data with Unit Variance with StandardScalar. + :paramtype with_std: str + """ + super().__init__(**kwargs) + self.booster = booster + self.boosting_type = boosting_type + self.grow_policy = grow_policy + self.learning_rate = learning_rate + self.max_bin = max_bin + self.max_depth = max_depth + self.max_leaves = max_leaves + self.min_data_in_leaf = min_data_in_leaf + self.min_split_gain = min_split_gain + self.model_name = model_name + self.n_estimators = n_estimators + self.num_leaves = num_leaves + self.preprocessor_name = preprocessor_name + self.reg_alpha = reg_alpha + self.reg_lambda = reg_lambda + self.subsample = subsample + self.subsample_freq = subsample_freq + self.tree_method = tree_method + self.with_mean = with_mean + self.with_std = with_std + + +class TableSweepSettings(_serialization.Model): + """TableSweepSettings. -class PrivateLinkResource(Resource): - """A private link resource. - - Variables are only populated by the server, and will be ignored when sending a request. 
+ All required parameters must be populated in order to send to Azure. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar group_id: The private link resource group id. - :vartype group_id: str - :ivar required_members: The private link resource required member names. - :vartype required_members: list[str] - :param required_zone_names: The private link resource Private link DNS zone name. - :type required_zone_names: list[str] + :ivar early_termination: Type of early termination policy for the sweeping job. + :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :ivar sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". + :vartype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'group_id': {'readonly': True}, - 'required_members': {'readonly': True}, + "sampling_algorithm": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, } def __init__( self, *, - identity: Optional["Identity"] = None, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, - required_zone_names: Optional[List[str]] = None, + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, **kwargs ): - super(PrivateLinkResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs) - self.group_id = None - self.required_members = None - self.required_zone_names = required_zone_names - - -class PrivateLinkResourceListResult(msrest.serialization.Model): - """A list of private link resources. - - :param value: Array of private link resources. - :type value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] + """ + :keyword early_termination: Type of early termination policy for the sweeping job. + :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :keyword sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". 
+ :paramtype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + super().__init__(**kwargs) + self.early_termination = early_termination + self.sampling_algorithm = sampling_algorithm + + +class TableVerticalFeaturizationSettings(FeaturizationSettings): + """Featurization Configuration. + + :ivar dataset_language: Dataset language, useful for the text data. + :vartype dataset_language: str + :ivar blocked_transformers: These transformers shall not be used in featurization. + :vartype blocked_transformers: list[str or + ~azure.mgmt.machinelearningservices.models.BlockedTransformers] + :ivar column_name_and_types: Dictionary of column name and its type (int, float, string, + datetime etc). + :vartype column_name_and_types: dict[str, str] + :ivar enable_dnn_featurization: Determines whether to use Dnn based featurizers for data + featurization. + :vartype enable_dnn_featurization: bool + :ivar mode: Featurization mode - User can keep the default 'Auto' mode and AutoML will take + care of necessary transformation of the data in featurization phase. + If 'Off' is selected then no featurization is done. + If 'Custom' is selected then user can specify additional inputs to customize how featurization + is done. Known values are: "Auto", "Custom", and "Off". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.FeaturizationMode + :ivar transformer_params: User can specify additional transformers to be used along with the + columns to which it would be applied and parameters for the transformer constructor. + :vartype transformer_params: dict[str, + list[~azure.mgmt.machinelearningservices.models.ColumnTransformer]] """ _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + "dataset_language": {"key": "datasetLanguage", "type": "str"}, + "blocked_transformers": {"key": "blockedTransformers", "type": "[str]"}, + "column_name_and_types": {"key": "columnNameAndTypes", "type": "{str}"}, + "enable_dnn_featurization": {"key": "enableDnnFeaturization", "type": "bool"}, + "mode": {"key": "mode", "type": "str"}, + "transformer_params": {"key": "transformerParams", "type": "{[ColumnTransformer]}"}, } def __init__( self, *, - value: Optional[List["PrivateLinkResource"]] = None, + dataset_language: Optional[str] = None, + blocked_transformers: Optional[List[Union[str, "_models.BlockedTransformers"]]] = None, + column_name_and_types: Optional[Dict[str, str]] = None, + enable_dnn_featurization: bool = False, + mode: Optional[Union[str, "_models.FeaturizationMode"]] = None, + transformer_params: Optional[Dict[str, List["_models.ColumnTransformer"]]] = None, **kwargs ): - super(PrivateLinkResourceListResult, self).__init__(**kwargs) - self.value = value - - -class PrivateLinkServiceConnectionState(msrest.serialization.Model): - """A collection of information about the state of the connection between service consumer and provider. - - :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", - "Timeout". - :type status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus - :param description: The reason for approval/rejection of the connection. - :type description: str - :param actions_required: A message indicating if changes on the service provider require any - updates on the consumer. 
-    :type actions_required: str
+        """
+        :keyword dataset_language: Dataset language, useful for the text data.
+        :paramtype dataset_language: str
+        :keyword blocked_transformers: These transformers shall not be used in featurization.
+        :paramtype blocked_transformers: list[str or
+         ~azure.mgmt.machinelearningservices.models.BlockedTransformers]
+        :keyword column_name_and_types: Dictionary of column name and its type (int, float, string,
+         datetime etc).
+        :paramtype column_name_and_types: dict[str, str]
+        :keyword enable_dnn_featurization: Determines whether to use Dnn based featurizers for data
+         featurization.
+        :paramtype enable_dnn_featurization: bool
+        :keyword mode: Featurization mode - User can keep the default 'Auto' mode and AutoML will take
+         care of necessary transformation of the data in featurization phase.
+         If 'Off' is selected then no featurization is done.
+         If 'Custom' is selected then user can specify additional inputs to customize how featurization
+         is done. Known values are: "Auto", "Custom", and "Off".
+        :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.FeaturizationMode
+        :keyword transformer_params: User can specify additional transformers to be used along with the
+         columns to which it would be applied and parameters for the transformer constructor.
+        :paramtype transformer_params: dict[str,
+         list[~azure.mgmt.machinelearningservices.models.ColumnTransformer]]
+        """
+        super().__init__(dataset_language=dataset_language, **kwargs)
+        self.blocked_transformers = blocked_transformers
+        self.column_name_and_types = column_name_and_types
+        self.enable_dnn_featurization = enable_dnn_featurization
+        self.mode = mode
+        self.transformer_params = transformer_params
+
+
+class TableVerticalLimitSettings(_serialization.Model):
+    """Job execution constraints.
+
+    :ivar enable_early_termination: Enable early termination; determines whether the AutoML job
+     will terminate early if there is no score improvement in the last 20 iterations.
+    :vartype enable_early_termination: bool
+    :ivar exit_score: Exit score for the AutoML job.
+    :vartype exit_score: float
+    :ivar max_concurrent_trials: Maximum concurrent iterations.
+    :vartype max_concurrent_trials: int
+    :ivar max_cores_per_trial: Max cores per iteration.
+    :vartype max_cores_per_trial: int
+    :ivar max_trials: Number of iterations.
+    :vartype max_trials: int
+    :ivar sweep_concurrent_trials: Number of concurrent sweeping runs that the user wants to
+     trigger.
+    :vartype sweep_concurrent_trials: int
+    :ivar sweep_trials: Number of sweeping runs that the user wants to trigger.
+    :vartype sweep_trials: int
+    :ivar timeout: AutoML job timeout.
+    :vartype timeout: ~datetime.timedelta
+    :ivar trial_timeout: Iteration timeout.
+    :vartype trial_timeout: ~datetime.timedelta
    """

    _attribute_map = {
-        'status': {'key': 'status', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+        "enable_early_termination": {"key": "enableEarlyTermination", "type": "bool"},
+        "exit_score": {"key": "exitScore", "type": "float"},
+        "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"},
+        "max_cores_per_trial": {"key": "maxCoresPerTrial", "type": "int"},
+        "max_trials": {"key": "maxTrials", "type": "int"},
+        "sweep_concurrent_trials": {"key": "sweepConcurrentTrials", "type": "int"},
+        "sweep_trials": {"key": "sweepTrials", "type": "int"},
+        "timeout": {"key": "timeout", "type": "duration"},
+        "trial_timeout": {"key": "trialTimeout", "type": "duration"},
    }

    def __init__(
        self,
        *,
-        status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
-        description: Optional[str] = None,
-        actions_required: Optional[str] = None,
+        enable_early_termination: bool = True,
+        exit_score: Optional[float] = None,
+        max_concurrent_trials: int = 1,
+        max_cores_per_trial: int = -1,
+        max_trials: int = 1000,
+        sweep_concurrent_trials: int = 0,
+        sweep_trials: int = 0,
+        timeout: datetime.timedelta = "PT6H",
+        trial_timeout: datetime.timedelta = "PT30M",
        **kwargs
    ):
-        super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
-        self.status = status
-        self.description = description
-        self.actions_required = actions_required
-
+        """
+        :keyword enable_early_termination: Enable early termination; determines whether the AutoML
+         job will terminate early if there is no score improvement in the last 20 iterations.
+        :paramtype enable_early_termination: bool
+        :keyword exit_score: Exit score for the AutoML job.
+        :paramtype exit_score: float
+        :keyword max_concurrent_trials: Maximum concurrent iterations.
+        :paramtype max_concurrent_trials: int
+        :keyword max_cores_per_trial: Max cores per iteration.
+        :paramtype max_cores_per_trial: int
+        :keyword max_trials: Number of iterations.
+        :paramtype max_trials: int
+        :keyword sweep_concurrent_trials: Number of concurrent sweeping runs that the user wants to
+         trigger.
+        :paramtype sweep_concurrent_trials: int
+        :keyword sweep_trials: Number of sweeping runs that the user wants to trigger.
+        :paramtype sweep_trials: int
+        :keyword timeout: AutoML job timeout.
+        :paramtype timeout: ~datetime.timedelta
+        :keyword trial_timeout: Iteration timeout.
+        :paramtype trial_timeout: ~datetime.timedelta
+        """
+        super().__init__(**kwargs)
+        self.enable_early_termination = enable_early_termination
+        self.exit_score = exit_score
+        self.max_concurrent_trials = max_concurrent_trials
+        self.max_cores_per_trial = max_cores_per_trial
+        self.max_trials = max_trials
+        self.sweep_concurrent_trials = sweep_concurrent_trials
+        self.sweep_trials = sweep_trials
+        self.timeout = timeout
+        self.trial_timeout = trial_timeout
+
+
+class TargetUtilizationScaleSettings(OnlineScaleSettings):
+    """TargetUtilizationScaleSettings.

-class QuotaBaseProperties(msrest.serialization.Model):
-    """The properties for Quota update or retrieval.
+    All required parameters must be populated in order to send to Azure.

-    :param id: Specifies the resource ID.
-    :type id: str
-    :param type: Specifies the resource type.
-    :type type: str
-    :param limit: The maximum permitted quota of the resource.
-    :type limit: long
-    :param unit: An enum describing the unit of quota measurement. Possible values include:
-     "Count".
-    :type unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit
+    :ivar scale_type: [Required] Type of deployment scaling algorithm. Required. Known values are:
+     "Default" and "TargetUtilization".
+    :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType
+    :ivar max_instances: The maximum number of instances that the deployment can scale to. The
+     quota will be reserved for max_instances.
+    :vartype max_instances: int
+    :ivar min_instances: The minimum number of instances to always be present.
+    :vartype min_instances: int
+    :ivar polling_interval: The polling interval in ISO 8601 format. Only supports duration with
+     precision as low as Seconds.
+    :vartype polling_interval: ~datetime.timedelta
+    :ivar target_utilization_percentage: Target CPU usage for the autoscaler.
+    :vartype target_utilization_percentage: int
    """

+    _validation = {
+        "scale_type": {"required": True},
+    }
+
    _attribute_map = {
-        'id': {'key': 'id', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'limit': {'key': 'limit', 'type': 'long'},
-        'unit': {'key': 'unit', 'type': 'str'},
+        "scale_type": {"key": "scaleType", "type": "str"},
+        "max_instances": {"key": "maxInstances", "type": "int"},
+        "min_instances": {"key": "minInstances", "type": "int"},
+        "polling_interval": {"key": "pollingInterval", "type": "duration"},
+        "target_utilization_percentage": {"key": "targetUtilizationPercentage", "type": "int"},
    }

    def __init__(
        self,
        *,
-        id: Optional[str] = None,
-        type: Optional[str] = None,
-        limit: Optional[int] = None,
-        unit: Optional[Union[str, "QuotaUnit"]] = None,
+        max_instances: int = 1,
+        min_instances: int = 1,
+        polling_interval: datetime.timedelta = "PT1S",
+        target_utilization_percentage: int = 70,
        **kwargs
    ):
-        super(QuotaBaseProperties, self).__init__(**kwargs)
-        self.id = id
-        self.type = type
-        self.limit = limit
-        self.unit = unit
-
+        """
+        :keyword max_instances: The maximum number of instances that the deployment can scale to. The
+         quota will be reserved for max_instances.
+        :paramtype max_instances: int
+        :keyword min_instances: The minimum number of instances to always be present.
+        :paramtype min_instances: int
+        :keyword polling_interval: The polling interval in ISO 8601 format. Only supports duration with
+         precision as low as Seconds.
+        :paramtype polling_interval: ~datetime.timedelta
+        :keyword target_utilization_percentage: Target CPU usage for the autoscaler.
+        :paramtype target_utilization_percentage: int
+        """
+        super().__init__(**kwargs)
+        self.scale_type = "TargetUtilization"  # type: str
+        self.max_instances = max_instances
+        self.min_instances = min_instances
+        self.polling_interval = polling_interval
+        self.target_utilization_percentage = target_utilization_percentage
+
+
+class TensorFlow(DistributionConfiguration):
+    """TensorFlow distribution configuration.

-class QuotaUpdateParameters(msrest.serialization.Model):
-    """Quota update parameters.
+    All required parameters must be populated in order to send to Azure.

-    :param value: The list for update quota.
-    :type value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties]
+    :ivar distribution_type: [Required] Specifies the type of distribution framework. Required.
+     Known values are: "PyTorch", "TensorFlow", and "Mpi".
+    :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType
+    :ivar parameter_server_count: Number of parameter server tasks.
+    :vartype parameter_server_count: int
+    :ivar worker_count: Number of workers.
If not specified, will default to the instance count. + :vartype worker_count: int """ - _attribute_map = { - 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, + _validation = { + "distribution_type": {"required": True}, } - def __init__( - self, - *, - value: Optional[List["QuotaBaseProperties"]] = None, - **kwargs - ): - super(QuotaUpdateParameters, self).__init__(**kwargs) - self.value = value + _attribute_map = { + "distribution_type": {"key": "distributionType", "type": "str"}, + "parameter_server_count": {"key": "parameterServerCount", "type": "int"}, + "worker_count": {"key": "workerCount", "type": "int"}, + } + def __init__(self, *, parameter_server_count: int = 0, worker_count: Optional[int] = None, **kwargs): + """ + :keyword parameter_server_count: Number of parameter server tasks. + :paramtype parameter_server_count: int + :keyword worker_count: Number of workers. If not specified, will default to the instance count. + :paramtype worker_count: int + """ + super().__init__(**kwargs) + self.distribution_type = "TensorFlow" # type: str + self.parameter_server_count = parameter_server_count + self.worker_count = worker_count + + +class TextClassification(NlpVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Text Classification task in AutoML NLP vertical. + NLP - Natural Language Processing. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for Text-Classification task. 
Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and + "PrecisionScoreWeighted". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + """ -class RegistryListCredentialsResult(msrest.serialization.Model): - """RegistryListCredentialsResult. + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + } - Variables are only populated by the server, and will be ignored when sending a request. + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } - :ivar location: - :vartype location: str - :ivar username: - :vartype username: str - :param passwords: - :type passwords: list[~azure.mgmt.machinelearningservices.models.Password] + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. 
+ :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword primary_metric: Primary metric for Text-Classification task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and + "PrecisionScoreWeighted". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + """ + super().__init__( + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + search_space=search_space, + sweep_settings=sweep_settings, + validation_data=validation_data, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = "TextClassification" # type: str + self.training_data = training_data + self.primary_metric = primary_metric + self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters + self.limit_settings = limit_settings + self.search_space = search_space + self.sweep_settings = sweep_settings + self.validation_data = validation_data + + +class TextClassificationMultilabel(NlpVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Text Classification Multilabel task in AutoML NLP vertical. + NLP - Natural Language Processing. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. 
+ :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for Text-Classification-Multilabel task. + Currently only Accuracy is supported as primary metric, hence user need not set it explicitly. + Known values are: "AUCWeighted", "Accuracy", "NormMacroRecall", + "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", and "IOU". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics """ _validation = { - 'location': {'readonly': True}, - 'username': {'readonly': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "primary_metric": {"readonly": True}, } _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'str'}, - 'passwords': {'key': 'passwords', 'type': '[Password]'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } def __init__( self, *, - passwords: Optional[List["Password"]] = None, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, **kwargs ): - super(RegistryListCredentialsResult, self).__init__(**kwargs) - self.location = None - self.username = None - self.passwords = passwords - - -class ResourceId(msrest.serialization.Model): - """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. - - All required parameters must be populated in order to send to Azure. - - :param id: Required. The ID of the resource. - :type id: str + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. 
+ :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + super().__init__( + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + search_space=search_space, + sweep_settings=sweep_settings, + validation_data=validation_data, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = "TextClassificationMultilabel" # type: str + self.training_data = training_data + self.primary_metric = None + self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters + self.limit_settings = limit_settings + self.search_space = search_space + self.sweep_settings = sweep_settings + self.validation_data = validation_data + + +class TextNer(NlpVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Text-NER task in AutoML NLP vertical. + NER - Named Entity Recognition. + NLP - Natural Language Processing. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar featurization_settings: Featurization inputs needed for AutoML job. 
+ :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for Text-NER task. + Only 'Accuracy' is supported for Text-NER, so user need not set this explicitly. Known values + are: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and + "PrecisionScoreWeighted". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ _validation = { - 'id': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "primary_metric": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } def __init__( self, *, - id: str, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, **kwargs ): - super(ResourceId, self).__init__(**kwargs) - self.id = id + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. 
+ :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + super().__init__( + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + search_space=search_space, + sweep_settings=sweep_settings, + validation_data=validation_data, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = "TextNER" # type: str + self.training_data = training_data + self.primary_metric = None + self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters + self.limit_settings = limit_settings + self.search_space = search_space + self.sweep_settings = sweep_settings + self.validation_data = validation_data + + +class TmpfsOptions(_serialization.Model): + """TmpfsOptions. + + :ivar size: Mention the Tmpfs size. + :vartype size: int + """ + + _attribute_map = { + "size": {"key": "size", "type": "int"}, + } + def __init__(self, *, size: Optional[int] = None, **kwargs): + """ + :keyword size: Mention the Tmpfs size. + :paramtype size: int + """ + super().__init__(**kwargs) + self.size = size -class ResourceName(msrest.serialization.Model): - """The Resource Name. - Variables are only populated by the server, and will be ignored when sending a request. +class TrialComponent(_serialization.Model): + """Trial component definition. - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str + All required parameters must be populated in order to send to Azure. + + :ivar code_id: ARM resource ID of the code asset. + :vartype code_id: str + :ivar command: [Required] The command to execute on startup of the job. eg. "python train.py". + Required. + :vartype command: str + :ivar distribution: Distribution configuration of the job. If set, this should be one of Mpi, + Tensorflow, PyTorch, or null. + :vartype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :ivar environment_id: [Required] The ARM resource ID of the Environment specification for the + job. Required. + :vartype environment_id: str + :ivar environment_variables: Environment variables included in the job. 
+ :vartype environment_variables: dict[str, str] + :ivar resources: Compute Resource configuration for the job. + :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration """ _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, + "command": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "environment_id": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + "code_id": {"key": "codeId", "type": "str"}, + "command": {"key": "command", "type": "str"}, + "distribution": {"key": "distribution", "type": "DistributionConfiguration"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, } def __init__( self, + *, + command: str, + environment_id: str, + code_id: Optional[str] = None, + distribution: Optional["_models.DistributionConfiguration"] = None, + environment_variables: Optional[Dict[str, str]] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, **kwargs ): - super(ResourceName, self).__init__(**kwargs) - self.value = None - self.localized_value = None - - -class ResourceQuota(msrest.serialization.Model): - """The quota assigned to a resource. + """ + :keyword code_id: ARM resource ID of the code asset. + :paramtype code_id: str + :keyword command: [Required] The command to execute on startup of the job. eg. "python + train.py". Required. + :paramtype command: str + :keyword distribution: Distribution configuration of the job. If set, this should be one of + Mpi, Tensorflow, PyTorch, or null. + :paramtype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :keyword environment_id: [Required] The ARM resource ID of the Environment specification for + the job. Required. + :paramtype environment_id: str + :keyword environment_variables: Environment variables included in the job. + :paramtype environment_variables: dict[str, str] + :keyword resources: Compute Resource configuration for the job. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + """ + super().__init__(**kwargs) + self.code_id = code_id + self.command = command + self.distribution = distribution + self.environment_id = environment_id + self.environment_variables = environment_variables + self.resources = resources + + +class TritonModelJobInput(AssetJobInput, JobInput): + """TritonModelJobInput. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar name: Name of the resource. - :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName - :ivar limit: The maximum permitted quota of the resource. - :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. 
Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str """ _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'name': {'readonly': True}, - 'limit': {'readonly': True}, - 'unit': {'readonly': True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'ResourceName'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, + *, + uri: str, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, **kwargs ): - super(ResourceQuota, self).__init__(**kwargs) - self.id = None - self.type = None - self.name = None - self.limit = None - self.unit = None + """ + :keyword description: Description for the input. + :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type = "triton_model" # type: str + self.mode = mode + self.uri = uri -class ResourceSkuLocationInfo(msrest.serialization.Model): - """ResourceSkuLocationInfo. +class TritonModelJobOutput(AssetJobOutput, JobOutput): + """TritonModelJobOutput. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar location: Location of the SKU. - :vartype location: str - :ivar zones: List of availability zones where the SKU is supported. - :vartype zones: list[str] - :ivar zone_details: Details of capabilities available to a SKU in specific zones. - :vartype zone_details: list[~azure.mgmt.machinelearningservices.models.ResourceSkuZoneDetails] + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. 
+ :vartype uri: str """ _validation = { - 'location': {'readonly': True}, - 'zones': {'readonly': True}, - 'zone_details': {'readonly': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'zones': {'key': 'zones', 'type': '[str]'}, - 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, + *, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, **kwargs ): - super(ResourceSkuLocationInfo, self).__init__(**kwargs) - self.location = None - self.zones = None - self.zone_details = None + """ + :keyword description: Description for the output. + :paramtype description: str + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_output_type = "triton_model" # type: str + self.mode = mode + self.uri = uri -class ResourceSkuZoneDetails(msrest.serialization.Model): - """Describes The zonal capabilities of a SKU. +class TruncationSelectionPolicy(EarlyTerminationPolicy): + """Defines an early termination policy that cancels a given percentage of runs at each evaluation interval. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar name: The set of zones that the SKU is available in with the specified capabilities. - :vartype name: list[str] - :ivar capabilities: A list of capabilities that are available for the SKU in the specified list - of zones. - :vartype capabilities: list[~azure.mgmt.machinelearningservices.models.SKUCapability] + :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. + :vartype delay_evaluation: int + :ivar evaluation_interval: Interval (number of runs) between policy evaluations. + :vartype evaluation_interval: int + :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: + "Bandit", "MedianStopping", and "TruncationSelection". + :vartype policy_type: str or + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType + :ivar truncation_percentage: The percentage of runs to cancel at each evaluation interval. 
+ :vartype truncation_percentage: int """ _validation = { - 'name': {'readonly': True}, - 'capabilities': {'readonly': True}, + "policy_type": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': '[str]'}, - 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, + "truncation_percentage": {"key": "truncationPercentage", "type": "int"}, } def __init__( - self, - **kwargs + self, *, delay_evaluation: int = 0, evaluation_interval: int = 0, truncation_percentage: int = 0, **kwargs ): - super(ResourceSkuZoneDetails, self).__init__(**kwargs) - self.name = None - self.capabilities = None - - -class Restriction(msrest.serialization.Model): - """The restriction because of which SKU cannot be used. + """ + :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. + :paramtype delay_evaluation: int + :keyword evaluation_interval: Interval (number of runs) between policy evaluations. + :paramtype evaluation_interval: int + :keyword truncation_percentage: The percentage of runs to cancel at each evaluation interval. + :paramtype truncation_percentage: int + """ + super().__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type = "TruncationSelection" # type: str + self.truncation_percentage = truncation_percentage + + +class UpdateWorkspaceQuotas(_serialization.Model): + """The properties for update Quota response. Variables are only populated by the server, and will be ignored when sending a request. - :ivar type: The type of restrictions. As of now only possible value for this is location. + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. :vartype type: str - :ivar values: The value of restrictions. If the restriction type is set to location. This would - be different locations where the SKU is restricted. - :vartype values: list[str] - :param reason_code: The reason for the restriction. Possible values include: "NotSpecified", - "NotAvailableForRegion", "NotAvailableForSubscription". - :type reason_code: str or ~azure.mgmt.machinelearningservices.models.ReasonCode + :ivar limit: The maximum permitted quota of the resource. + :vartype limit: int + :ivar unit: An enum describing the unit of quota measurement. "Count" + :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + :ivar status: Status of update workspace quota. Known values are: "Undefined", "Success", + "Failure", "InvalidQuotaBelowClusterMinimum", "InvalidQuotaExceedsSubscriptionLimit", + "InvalidVMFamilyName", "OperationNotSupportedForSku", and "OperationNotEnabledForRegion". 
+ :vartype status: str or ~azure.mgmt.machinelearningservices.models.Status """ _validation = { - 'type': {'readonly': True}, - 'values': {'readonly': True}, + "id": {"readonly": True}, + "type": {"readonly": True}, + "unit": {"readonly": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - 'reason_code': {'key': 'reasonCode', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "limit": {"key": "limit", "type": "int"}, + "unit": {"key": "unit", "type": "str"}, + "status": {"key": "status", "type": "str"}, } - def __init__( - self, - *, - reason_code: Optional[Union[str, "ReasonCode"]] = None, - **kwargs - ): - super(Restriction, self).__init__(**kwargs) + def __init__(self, *, limit: Optional[int] = None, status: Optional[Union[str, "_models.Status"]] = None, **kwargs): + """ + :keyword limit: The maximum permitted quota of the resource. + :paramtype limit: int + :keyword status: Status of update workspace quota. Known values are: "Undefined", "Success", + "Failure", "InvalidQuotaBelowClusterMinimum", "InvalidQuotaExceedsSubscriptionLimit", + "InvalidVMFamilyName", "OperationNotSupportedForSku", and "OperationNotEnabledForRegion". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.Status + """ + super().__init__(**kwargs) + self.id = None self.type = None - self.values = None - self.reason_code = reason_code + self.limit = limit + self.unit = None + self.status = status -class ScaleSettings(msrest.serialization.Model): - """scale settings for AML Compute. +class UpdateWorkspaceQuotasResult(_serialization.Model): + """The result of update workspace quota. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param max_node_count: Required. Max number of nodes to use. - :type max_node_count: int - :param min_node_count: Min number of nodes to use. - :type min_node_count: int - :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. - :type node_idle_time_before_scale_down: ~datetime.timedelta + :ivar value: The list of workspace quota update result. + :vartype value: list[~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotas] + :ivar next_link: The URI to fetch the next page of workspace quota update result. Call + ListNext() with this to fetch the next page of Workspace Quota update result. 
+ :vartype next_link: str """ _validation = { - 'max_node_count': {'required': True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, + "value": {"key": "value", "type": "[UpdateWorkspaceQuotas]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - max_node_count: int, - min_node_count: Optional[int] = 0, - node_idle_time_before_scale_down: Optional[datetime.timedelta] = None, - **kwargs - ): - super(ScaleSettings, self).__init__(**kwargs) - self.max_node_count = max_node_count - self.min_node_count = min_node_count - self.node_idle_time_before_scale_down = node_idle_time_before_scale_down + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None -class ServicePrincipalCredentials(msrest.serialization.Model): - """Service principal credentials. +class UriFileDataVersion(DataVersionBaseProperties): + """uri-file data version entity. All required parameters must be populated in order to send to Azure. - :param client_id: Required. Client Id. - :type client_id: str - :param client_secret: Required. Client secret. - :type client_secret: str + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20221001Preview.Assets.DataVersionBase.DataType. + Required. + :vartype data_uri: str """ _validation = { - 'client_id': {'required': True}, - 'client_secret': {'required': True}, + "data_type": {"required": True}, + "data_uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, } def __init__( self, *, - client_id: str, - client_secret: str, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: bool = False, + is_archived: bool = False, **kwargs ): - super(ServicePrincipalCredentials, self).__init__(**kwargs) - self.client_id = client_id - self.client_secret = client_secret - + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. 
+ :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_uri: [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20221001Preview.Assets.DataVersionBase.DataType. + Required. + :paramtype data_uri: str + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + is_anonymous=is_anonymous, + is_archived=is_archived, + data_uri=data_uri, + **kwargs + ) + self.data_type = "uri_file" # type: str + + +class UriFileJobInput(AssetJobInput, JobInput): + """UriFileJobInput. -class SharedPrivateLinkResource(msrest.serialization.Model): - """SharedPrivateLinkResource. + All required parameters must be populated in order to send to Azure. - :param name: Unique name of the private link. - :type name: str - :param private_link_resource_id: The resource id that private link links to. - :type private_link_resource_id: str - :param group_id: The private link resource group id. - :type group_id: str - :param request_message: Request message. - :type request_message: str - :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", - "Timeout". - :type status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. 
+ :vartype uri: str """ + _validation = { + "job_input_type": {"required": True}, + "uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, - 'status': {'key': 'properties.status', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, - name: Optional[str] = None, - private_link_resource_id: Optional[str] = None, - group_id: Optional[str] = None, - request_message: Optional[str] = None, - status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None, + uri: str, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, **kwargs ): - super(SharedPrivateLinkResource, self).__init__(**kwargs) - self.name = name - self.private_link_resource_id = private_link_resource_id - self.group_id = group_id - self.request_message = request_message - self.status = status + """ + :keyword description: Description for the input. + :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type = "uri_file" # type: str + self.mode = mode + self.uri = uri -class Sku(msrest.serialization.Model): - """Sku of the resource. +class UriFileJobOutput(AssetJobOutput, JobOutput): + """UriFileJobOutput. - :param name: Name of the sku. - :type name: str - :param tier: Tier of the sku like Basic or Enterprise. - :type tier: str + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. 
+ :vartype uri: str """ + _validation = { + "job_output_type": {"required": True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, - name: Optional[str] = None, - tier: Optional[str] = None, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, **kwargs ): - super(Sku, self).__init__(**kwargs) - self.name = name - self.tier = tier + """ + :keyword description: Description for the output. + :paramtype description: str + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_output_type = "uri_file" # type: str + self.mode = mode + self.uri = uri -class SKUCapability(msrest.serialization.Model): - """Features/user capabilities associated with the sku. +class UriFolderDataVersion(DataVersionBaseProperties): + """uri-folder data version entity. + + All required parameters must be populated in order to send to Azure. - :param name: Capability/Feature ID. - :type name: str - :param value: Details about the feature/capability. - :type value: str + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20221001Preview.Assets.DataVersionBase.DataType. + Required. 
+ :vartype data_uri: str """ + _validation = { + "data_type": {"required": True}, + "data_uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, } def __init__( self, *, - name: Optional[str] = None, - value: Optional[str] = None, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: bool = False, + is_archived: bool = False, **kwargs ): - super(SKUCapability, self).__init__(**kwargs) - self.name = name - self.value = value - + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_uri: [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20221001Preview.Assets.DataVersionBase.DataType. + Required. + :paramtype data_uri: str + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + is_anonymous=is_anonymous, + is_archived=is_archived, + data_uri=data_uri, + **kwargs + ) + self.data_type = "uri_folder" # type: str + + +class UriFolderJobInput(AssetJobInput, JobInput): + """UriFolderJobInput. -class SkuListResult(msrest.serialization.Model): - """List of skus with features. + All required parameters must be populated in order to send to Azure. - :param value: - :type value: list[~azure.mgmt.machinelearningservices.models.WorkspaceSku] - :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this - URI to fetch the next page of Workspace Skus. - :type next_link: str + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. 
+ :vartype uri: str """ + _validation = { + "job_input_type": {"required": True}, + "uri": {"required": True, "pattern": r"[a-zA-Z0-9_]"}, + } + _attribute_map = { - 'value': {'key': 'value', 'type': '[WorkspaceSku]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, - value: Optional[List["WorkspaceSku"]] = None, - next_link: Optional[str] = None, + uri: str, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, **kwargs ): - super(SkuListResult, self).__init__(**kwargs) - self.value = value - self.next_link = next_link + """ + :keyword description: Description for the input. + :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type = "uri_folder" # type: str + self.mode = mode + self.uri = uri -class SkuSettings(msrest.serialization.Model): - """Describes Workspace Sku details and features. +class UriFolderJobOutput(AssetJobOutput, JobOutput): + """UriFolderJobOutput. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar locations: The set of locations that the SKU is available. This will be supported and - registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). - :vartype locations: list[str] - :ivar location_info: A list of locations and availability zones in those locations where the - SKU is available. - :vartype location_info: - list[~azure.mgmt.machinelearningservices.models.ResourceSkuLocationInfo] - :ivar tier: Sku Tier like Basic or Enterprise. - :vartype tier: str - :ivar resource_type: - :vartype resource_type: str - :ivar name: - :vartype name: str - :ivar capabilities: List of features/user capabilities associated with the sku. - :vartype capabilities: list[~azure.mgmt.machinelearningservices.models.SKUCapability] - :param restrictions: The restrictions because of which SKU cannot be used. This is empty if - there are no restrictions. - :type restrictions: list[~azure.mgmt.machinelearningservices.models.Restriction] + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. 
+ :vartype uri: str """ _validation = { - 'locations': {'readonly': True}, - 'location_info': {'readonly': True}, - 'tier': {'readonly': True}, - 'resource_type': {'readonly': True}, - 'name': {'readonly': True}, - 'capabilities': {'readonly': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'locations': {'key': 'locations', 'type': '[str]'}, - 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'}, - 'tier': {'key': 'tier', 'type': 'str'}, - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, - 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, - restrictions: Optional[List["Restriction"]] = None, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, **kwargs ): - super(SkuSettings, self).__init__(**kwargs) - self.locations = None - self.location_info = None - self.tier = None - self.resource_type = None - self.name = None - self.capabilities = None - self.restrictions = restrictions + """ + :keyword description: Description for the output. + :paramtype description: str + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + """ + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_output_type = "uri_folder" # type: str + self.mode = mode + self.uri = uri -class SslConfiguration(msrest.serialization.Model): - """The ssl configuration for scoring. +class Usage(_serialization.Model): + """Describes AML Resource Usage. + + Variables are only populated by the server, and will be ignored when sending a request. - :param status: Enable or disable ssl for scoring. Possible values include: "Disabled", - "Enabled". - :type status: str or ~azure.mgmt.machinelearningservices.models.SslConfigurationStatus - :param cert: Cert data. - :type cert: str - :param key: Key data. - :type key: str - :param cname: CNAME of the cert. - :type cname: str + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar aml_workspace_location: Region of the AML workspace in the id. + :vartype aml_workspace_location: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar unit: An enum describing the unit of usage measurement. "Count" + :vartype unit: str or ~azure.mgmt.machinelearningservices.models.UsageUnit + :ivar current_value: The current usage of the resource. + :vartype current_value: int + :ivar limit: The maximum permitted usage of the resource. + :vartype limit: int + :ivar name: The name of the type of usage. 
+ :vartype name: ~azure.mgmt.machinelearningservices.models.UsageName """ + _validation = { + "id": {"readonly": True}, + "aml_workspace_location": {"readonly": True}, + "type": {"readonly": True}, + "unit": {"readonly": True}, + "current_value": {"readonly": True}, + "limit": {"readonly": True}, + "name": {"readonly": True}, + } + _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'cert': {'key': 'cert', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, - 'cname': {'key': 'cname', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "aml_workspace_location": {"key": "amlWorkspaceLocation", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "unit": {"key": "unit", "type": "str"}, + "current_value": {"key": "currentValue", "type": "int"}, + "limit": {"key": "limit", "type": "int"}, + "name": {"key": "name", "type": "UsageName"}, } - def __init__( - self, - *, - status: Optional[Union[str, "SslConfigurationStatus"]] = None, - cert: Optional[str] = None, - key: Optional[str] = None, - cname: Optional[str] = None, - **kwargs - ): - super(SslConfiguration, self).__init__(**kwargs) - self.status = status - self.cert = cert - self.key = key - self.cname = cname + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.id = None + self.aml_workspace_location = None + self.type = None + self.unit = None + self.current_value = None + self.limit = None + self.name = None -class SystemService(msrest.serialization.Model): - """A system service running on a compute. +class UsageName(_serialization.Model): + """The Usage Names. Variables are only populated by the server, and will be ignored when sending a request. - :ivar system_service_type: The type of this system service. - :vartype system_service_type: str - :ivar public_ip_address: Public IP address. - :vartype public_ip_address: str - :ivar version: The version for this type. - :vartype version: str + :ivar value: The name of the resource. + :vartype value: str + :ivar localized_value: The localized name of the resource. + :vartype localized_value: str """ _validation = { - 'system_service_type': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'version': {'readonly': True}, + "value": {"readonly": True}, + "localized_value": {"readonly": True}, } _attribute_map = { - 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + "value": {"key": "value", "type": "str"}, + "localized_value": {"key": "localizedValue", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(SystemService, self).__init__(**kwargs) - self.system_service_type = None - self.public_ip_address = None - self.version = None + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.value = None + self.localized_value = None -class UpdateWorkspaceQuotas(msrest.serialization.Model): - """The properties for update Quota response. +class UserAccountCredentials(_serialization.Model): + """Settings for user account that gets created on each on the nodes of a compute. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :param limit: The maximum permitted quota of the resource. 
- :type limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - :param status: Status of update workspace quota. Possible values include: "Undefined", - "Success", "Failure", "InvalidQuotaBelowClusterMinimum", - "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", - "OperationNotEnabledForRegion". - :type status: str or ~azure.mgmt.machinelearningservices.models.Status + :ivar admin_user_name: Name of the administrator user account which can be used to SSH to + nodes. Required. + :vartype admin_user_name: str + :ivar admin_user_ssh_public_key: SSH public key of the administrator user account. + :vartype admin_user_ssh_public_key: str + :ivar admin_user_password: Password of the administrator user account. + :vartype admin_user_password: str """ _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, + "admin_user_name": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "admin_user_name": {"key": "adminUserName", "type": "str"}, + "admin_user_ssh_public_key": {"key": "adminUserSshPublicKey", "type": "str"}, + "admin_user_password": {"key": "adminUserPassword", "type": "str"}, } def __init__( self, *, - limit: Optional[int] = None, - status: Optional[Union[str, "Status"]] = None, + admin_user_name: str, + admin_user_ssh_public_key: Optional[str] = None, + admin_user_password: Optional[str] = None, **kwargs ): - super(UpdateWorkspaceQuotas, self).__init__(**kwargs) - self.id = None - self.type = None - self.limit = limit - self.unit = None - self.status = status + """ + :keyword admin_user_name: Name of the administrator user account which can be used to SSH to + nodes. Required. + :paramtype admin_user_name: str + :keyword admin_user_ssh_public_key: SSH public key of the administrator user account. + :paramtype admin_user_ssh_public_key: str + :keyword admin_user_password: Password of the administrator user account. + :paramtype admin_user_password: str + """ + super().__init__(**kwargs) + self.admin_user_name = admin_user_name + self.admin_user_ssh_public_key = admin_user_ssh_public_key + self.admin_user_password = admin_user_password -class UpdateWorkspaceQuotasResult(msrest.serialization.Model): - """The result of update workspace quota. +class UserAssignedIdentity(_serialization.Model): + """User assigned identity properties. Variables are only populated by the server, and will be ignored when sending a request. - :ivar value: The list of workspace quota update result. - :vartype value: list[~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotas] - :ivar next_link: The URI to fetch the next page of workspace quota update result. Call - ListNext() with this to fetch the next page of Workspace Quota update result. - :vartype next_link: str + :ivar principal_id: The principal ID of the assigned identity. + :vartype principal_id: str + :ivar client_id: The client ID of the assigned identity. 
+ :vartype client_id: str """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "principal_id": {"readonly": True}, + "client_id": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "principal_id": {"key": "principalId", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class UserCreatedAcrAccount(_serialization.Model): + """UserCreatedAcrAccount. + + :ivar arm_resource_id: ARM ResourceId of a resource. + :vartype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId + """ + + _attribute_map = { + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, } - def __init__( - self, - **kwargs - ): - super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - + def __init__(self, *, arm_resource_id: Optional["_models.ArmResourceId"] = None, **kwargs): + """ + :keyword arm_resource_id: ARM ResourceId of a resource. + :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId + """ + super().__init__(**kwargs) + self.arm_resource_id = arm_resource_id -class Usage(msrest.serialization.Model): - """Describes AML Resource Usage. - Variables are only populated by the server, and will be ignored when sending a request. +class UserCreatedStorageAccount(_serialization.Model): + """UserCreatedStorageAccount. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.UsageUnit - :ivar current_value: The current usage of the resource. - :vartype current_value: long - :ivar limit: The maximum permitted usage of the resource. - :vartype limit: long - :ivar name: The name of the type of usage. - :vartype name: ~azure.mgmt.machinelearningservices.models.UsageName + :ivar arm_resource_id: ARM ResourceId of a resource. + :vartype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId """ - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, - 'current_value': {'readonly': True}, - 'limit': {'readonly': True}, - 'name': {'readonly': True}, - } - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'current_value': {'key': 'currentValue', 'type': 'long'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'UsageName'}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, } - def __init__( - self, - **kwargs - ): - super(Usage, self).__init__(**kwargs) - self.id = None - self.type = None - self.unit = None - self.current_value = None - self.limit = None - self.name = None + def __init__(self, *, arm_resource_id: Optional["_models.ArmResourceId"] = None, **kwargs): + """ + :keyword arm_resource_id: ARM ResourceId of a resource. + :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId + """ + super().__init__(**kwargs) + self.arm_resource_id = arm_resource_id -class UsageName(msrest.serialization.Model): - """The Usage Names. 
+class UserIdentity(IdentityConfiguration): + """User identity configuration. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str + :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known + values are: "Managed", "AMLToken", and "UserIdentity". + :vartype identity_type: str or + ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType """ _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(UsageName, self).__init__(**kwargs) - self.value = None - self.localized_value = None + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.identity_type = "UserIdentity" # type: str -class UserAccountCredentials(msrest.serialization.Model): - """Settings for user account that gets created on each on the nodes of a compute. +class UsernamePasswordAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """UsernamePasswordAuthTypeWorkspaceConnectionProperties. All required parameters must be populated in order to send to Azure. - :param admin_user_name: Required. Name of the administrator user account which can be used to - SSH to nodes. - :type admin_user_name: str - :param admin_user_ssh_public_key: SSH public key of the administrator user account. - :type admin_user_ssh_public_key: str - :param admin_user_password: Password of the administrator user account. - :type admin_user_password: str + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", and + "AccessKey". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. + :vartype category: str + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. 
"JSON" + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword """ _validation = { - 'admin_user_name': {'required': True}, + "auth_type": {"required": True}, } _attribute_map = { - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'}, - 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "value": {"key": "value", "type": "str"}, + "value_format": {"key": "valueFormat", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionUsernamePassword"}, } def __init__( self, *, - admin_user_name: str, - admin_user_ssh_public_key: Optional[str] = None, - admin_user_password: Optional[str] = None, + category: Optional[str] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionUsernamePassword"] = None, **kwargs ): - super(UserAccountCredentials, self).__init__(**kwargs) - self.admin_user_name = admin_user_name - self.admin_user_ssh_public_key = admin_user_ssh_public_key - self.admin_user_password = admin_user_password + """ + :keyword category: Category of the connection. + :paramtype category: str + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. "JSON" + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword + """ + super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = "UsernamePassword" # type: str + self.credentials = credentials + + +class VirtualMachineSchema(_serialization.Model): + """VirtualMachineSchema. + + :ivar properties: + :vartype properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "VirtualMachineSchemaProperties"}, + } + + def __init__(self, *, properties: Optional["_models.VirtualMachineSchemaProperties"] = None, **kwargs): + """ + :keyword properties: + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties + """ + super().__init__(**kwargs) + self.properties = properties -class VirtualMachine(Compute): +class VirtualMachine(Compute, VirtualMachineSchema): # pylint: disable=too-many-instance-attributes """A Machine Learning compute based on Azure Virtual Machines. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. 
- :type compute_location: str + :ivar properties: + :vartype properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineProperties + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "compute_location": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + "disable_local_auth": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'}, + "properties": {"key": "properties", "type": "VirtualMachineSchemaProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - compute_location: Optional[str] = None, + properties: Optional["_models.VirtualMachineSchemaProperties"] = None, description: Optional[str] = None, resource_id: Optional[str] = None, - properties: Optional["VirtualMachineProperties"] = None, **kwargs ): - super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'VirtualMachine' # type: str + """ + :keyword properties: + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + """ + super().__init__(description=description, resource_id=resource_id, properties=properties, **kwargs) self.properties = properties + self.compute_type = "VirtualMachine" # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = None + + +class VirtualMachineImage(_serialization.Model): + """Virtual Machine image for Windows AML Compute. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Virtual Machine image path. Required. 
+ :vartype id: str + """ + + _validation = { + "id": {"required": True}, + } + _attribute_map = { + "id": {"key": "id", "type": "str"}, + } + + def __init__(self, *, id: str, **kwargs): # pylint: disable=redefined-builtin + """ + :keyword id: Virtual Machine image path. Required. + :paramtype id: str + """ + super().__init__(**kwargs) + self.id = id -class VirtualMachineProperties(msrest.serialization.Model): - """VirtualMachineProperties. - :param virtual_machine_size: Virtual Machine size. - :type virtual_machine_size: str - :param ssh_port: Port open for ssh connections. - :type ssh_port: int - :param address: Public IP address of the virtual machine. - :type address: str - :param administrator_account: Admin credentials for virtual machine. - :type administrator_account: +class VirtualMachineSchemaProperties(_serialization.Model): + """VirtualMachineSchemaProperties. + + :ivar virtual_machine_size: Virtual Machine size. + :vartype virtual_machine_size: str + :ivar ssh_port: Port open for ssh connections. + :vartype ssh_port: int + :ivar notebook_server_port: Notebook server port open for ssh connections. + :vartype notebook_server_port: int + :ivar address: Public IP address of the virtual machine. + :vartype address: str + :ivar administrator_account: Admin credentials for virtual machine. + :vartype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :ivar is_notebook_instance_compute: Indicates whether this compute will be used for running + notebooks. + :vartype is_notebook_instance_compute: bool """ _attribute_map = { - 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + "virtual_machine_size": {"key": "virtualMachineSize", "type": "str"}, + "ssh_port": {"key": "sshPort", "type": "int"}, + "notebook_server_port": {"key": "notebookServerPort", "type": "int"}, + "address": {"key": "address", "type": "str"}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + "is_notebook_instance_compute": {"key": "isNotebookInstanceCompute", "type": "bool"}, } def __init__( @@ -3416,52 +24058,94 @@ def __init__( *, virtual_machine_size: Optional[str] = None, ssh_port: Optional[int] = None, + notebook_server_port: Optional[int] = None, address: Optional[str] = None, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, + administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, + is_notebook_instance_compute: Optional[bool] = None, **kwargs ): - super(VirtualMachineProperties, self).__init__(**kwargs) + """ + :keyword virtual_machine_size: Virtual Machine size. + :paramtype virtual_machine_size: str + :keyword ssh_port: Port open for ssh connections. + :paramtype ssh_port: int + :keyword notebook_server_port: Notebook server port open for ssh connections. + :paramtype notebook_server_port: int + :keyword address: Public IP address of the virtual machine. + :paramtype address: str + :keyword administrator_account: Admin credentials for virtual machine. + :paramtype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :keyword is_notebook_instance_compute: Indicates whether this compute will be used for running + notebooks. 
+ :paramtype is_notebook_instance_compute: bool + """ + super().__init__(**kwargs) self.virtual_machine_size = virtual_machine_size self.ssh_port = ssh_port + self.notebook_server_port = notebook_server_port self.address = address self.administrator_account = administrator_account + self.is_notebook_instance_compute = is_notebook_instance_compute + + +class VirtualMachineSecretsSchema(_serialization.Model): + """VirtualMachineSecretsSchema. + + :ivar administrator_account: Admin credentials for virtual machine. + :vartype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + + _attribute_map = { + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + } + + def __init__(self, *, administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, **kwargs): + """ + :keyword administrator_account: Admin credentials for virtual machine. + :paramtype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + super().__init__(**kwargs) + self.administrator_account = administrator_account -class VirtualMachineSecrets(ComputeSecrets): +class VirtualMachineSecrets(ComputeSecrets, VirtualMachineSecretsSchema): """Secrets related to a Machine Learning compute based on AKS. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param administrator_account: Admin credentials for virtual machine. - :type administrator_account: + :ivar administrator_account: Admin credentials for virtual machine. + :vartype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + "compute_type": {"key": "computeType", "type": "str"}, } - def __init__( - self, - *, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, - **kwargs - ): - super(VirtualMachineSecrets, self).__init__(**kwargs) - self.compute_type = 'VirtualMachine' # type: str + def __init__(self, *, administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, **kwargs): + """ + :keyword administrator_account: Admin credentials for virtual machine. 
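# Illustrative usage sketch: one way to compose the VirtualMachine compute model from the
# classes in this section. The subscription/resource-group segments and credential values are
# placeholders, and VirtualMachineSshCredentials is defined later in this file.
from azure.mgmt.machinelearningservices import models as ml_models

vm_compute = ml_models.VirtualMachine(
    description="VM attached as a Machine Learning compute target",
    resource_id=(
        "/subscriptions/<subscription-id>/resourceGroups/<resource-group>"
        "/providers/Microsoft.Compute/virtualMachines/<vm-name>"
    ),
    properties=ml_models.VirtualMachineSchemaProperties(
        ssh_port=22,
        address="10.0.0.4",
        administrator_account=ml_models.VirtualMachineSshCredentials(
            username="azureuser",
            password="<placeholder-password>",
        ),
    ),
)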
+ :paramtype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + super().__init__(administrator_account=administrator_account, **kwargs) self.administrator_account = administrator_account + self.compute_type = "VirtualMachine" # type: str -class VirtualMachineSize(msrest.serialization.Model): +class VirtualMachineSize(_serialization.Model): # pylint: disable=too-many-instance-attributes """Describes the properties of a VM size. Variables are only populated by the server, and will be ignored when sending a request. @@ -3485,47 +24169,54 @@ class VirtualMachineSize(msrest.serialization.Model): :vartype low_priority_capable: bool :ivar premium_io: Specifies if the virtual machine size supports premium IO. :vartype premium_io: bool - :param estimated_vm_prices: The estimated price information for using a VM. - :type estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices - :param supported_compute_types: Specifies the compute types supported by the virtual machine + :ivar estimated_vm_prices: The estimated price information for using a VM. + :vartype estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices + :ivar supported_compute_types: Specifies the compute types supported by the virtual machine size. - :type supported_compute_types: list[str] + :vartype supported_compute_types: list[str] """ _validation = { - 'name': {'readonly': True}, - 'family': {'readonly': True}, - 'v_cp_us': {'readonly': True}, - 'gpus': {'readonly': True}, - 'os_vhd_size_mb': {'readonly': True}, - 'max_resource_volume_mb': {'readonly': True}, - 'memory_gb': {'readonly': True}, - 'low_priority_capable': {'readonly': True}, - 'premium_io': {'readonly': True}, + "name": {"readonly": True}, + "family": {"readonly": True}, + "v_cp_us": {"readonly": True}, + "gpus": {"readonly": True}, + "os_vhd_size_mb": {"readonly": True}, + "max_resource_volume_mb": {"readonly": True}, + "memory_gb": {"readonly": True}, + "low_priority_capable": {"readonly": True}, + "premium_io": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, - 'gpus': {'key': 'gpus', 'type': 'int'}, - 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, - 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, - 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, - 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, - 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, - 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVMPrices'}, - 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'}, + "name": {"key": "name", "type": "str"}, + "family": {"key": "family", "type": "str"}, + "v_cp_us": {"key": "vCPUs", "type": "int"}, + "gpus": {"key": "gpus", "type": "int"}, + "os_vhd_size_mb": {"key": "osVhdSizeMB", "type": "int"}, + "max_resource_volume_mb": {"key": "maxResourceVolumeMB", "type": "int"}, + "memory_gb": {"key": "memoryGB", "type": "float"}, + "low_priority_capable": {"key": "lowPriorityCapable", "type": "bool"}, + "premium_io": {"key": "premiumIO", "type": "bool"}, + "estimated_vm_prices": {"key": "estimatedVMPrices", "type": "EstimatedVMPrices"}, + "supported_compute_types": {"key": "supportedComputeTypes", "type": "[str]"}, } def __init__( self, *, - estimated_vm_prices: Optional["EstimatedVMPrices"] = None, + estimated_vm_prices: 
Optional["_models.EstimatedVMPrices"] = None, supported_compute_types: Optional[List[str]] = None, **kwargs ): - super(VirtualMachineSize, self).__init__(**kwargs) + """ + :keyword estimated_vm_prices: The estimated price information for using a VM. + :paramtype estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices + :keyword supported_compute_types: Specifies the compute types supported by the virtual machine + size. + :paramtype supported_compute_types: list[str] + """ + super().__init__(**kwargs) self.name = None self.family = None self.v_cp_us = None @@ -3539,45 +24230,44 @@ def __init__( self.supported_compute_types = supported_compute_types -class VirtualMachineSizeListResult(msrest.serialization.Model): +class VirtualMachineSizeListResult(_serialization.Model): """The List Virtual Machine size operation response. - :param aml_compute: The list of virtual machine sizes supported by AmlCompute. - :type aml_compute: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] + :ivar value: The list of virtual machine sizes supported by AmlCompute. + :vartype value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] """ _attribute_map = { - 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'}, + "value": {"key": "value", "type": "[VirtualMachineSize]"}, } - def __init__( - self, - *, - aml_compute: Optional[List["VirtualMachineSize"]] = None, - **kwargs - ): - super(VirtualMachineSizeListResult, self).__init__(**kwargs) - self.aml_compute = aml_compute + def __init__(self, *, value: Optional[List["_models.VirtualMachineSize"]] = None, **kwargs): + """ + :keyword value: The list of virtual machine sizes supported by AmlCompute. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] + """ + super().__init__(**kwargs) + self.value = value -class VirtualMachineSshCredentials(msrest.serialization.Model): +class VirtualMachineSshCredentials(_serialization.Model): """Admin credentials for virtual machine. - :param username: Username of admin account. - :type username: str - :param password: Password of admin account. - :type password: str - :param public_key_data: Public key data. - :type public_key_data: str - :param private_key_data: Private key data. - :type private_key_data: str + :ivar username: Username of admin account. + :vartype username: str + :ivar password: Password of admin account. + :vartype password: str + :ivar public_key_data: Public key data. + :vartype public_key_data: str + :ivar private_key_data: Private key data. + :vartype private_key_data: str """ _attribute_map = { - 'username': {'key': 'username', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'public_key_data': {'key': 'publicKeyData', 'type': 'str'}, - 'private_key_data': {'key': 'privateKeyData', 'type': 'str'}, + "username": {"key": "username", "type": "str"}, + "password": {"key": "password", "type": "str"}, + "public_key_data": {"key": "publicKeyData", "type": "str"}, + "private_key_data": {"key": "privateKeyData", "type": "str"}, } def __init__( @@ -3589,135 +24279,292 @@ def __init__( private_key_data: Optional[str] = None, **kwargs ): - super(VirtualMachineSshCredentials, self).__init__(**kwargs) + """ + :keyword username: Username of admin account. + :paramtype username: str + :keyword password: Password of admin account. + :paramtype password: str + :keyword public_key_data: Public key data. + :paramtype public_key_data: str + :keyword private_key_data: Private key data. 
+ :paramtype private_key_data: str + """ + super().__init__(**kwargs) self.username = username self.password = password self.public_key_data = public_key_data self.private_key_data = private_key_data -class Workspace(Resource): +class VolumeDefinition(_serialization.Model): + """VolumeDefinition. + + :ivar type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Known values + are: "bind", "volume", "tmpfs", and "npipe". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.VolumeDefinitionType + :ivar read_only: Indicate whether to mount volume as readOnly. Default value for this is false. + :vartype read_only: bool + :ivar source: Source of the mount. For bind mounts this is the host path. + :vartype source: str + :ivar target: Target of the mount. For bind mounts this is the path in the container. + :vartype target: str + :ivar consistency: Consistency of the volume. + :vartype consistency: str + :ivar bind: Bind Options of the mount. + :vartype bind: ~azure.mgmt.machinelearningservices.models.BindOptions + :ivar volume: Volume Options of the mount. + :vartype volume: ~azure.mgmt.machinelearningservices.models.VolumeOptions + :ivar tmpfs: tmpfs option of the mount. + :vartype tmpfs: ~azure.mgmt.machinelearningservices.models.TmpfsOptions + """ + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "read_only": {"key": "readOnly", "type": "bool"}, + "source": {"key": "source", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "consistency": {"key": "consistency", "type": "str"}, + "bind": {"key": "bind", "type": "BindOptions"}, + "volume": {"key": "volume", "type": "VolumeOptions"}, + "tmpfs": {"key": "tmpfs", "type": "TmpfsOptions"}, + } + + def __init__( + self, + *, + type: Union[str, "_models.VolumeDefinitionType"] = "bind", + read_only: Optional[bool] = None, + source: Optional[str] = None, + target: Optional[str] = None, + consistency: Optional[str] = None, + bind: Optional["_models.BindOptions"] = None, + volume: Optional["_models.VolumeOptions"] = None, + tmpfs: Optional["_models.TmpfsOptions"] = None, + **kwargs + ): + """ + :keyword type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Known + values are: "bind", "volume", "tmpfs", and "npipe". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.VolumeDefinitionType + :keyword read_only: Indicate whether to mount volume as readOnly. Default value for this is + false. + :paramtype read_only: bool + :keyword source: Source of the mount. For bind mounts this is the host path. + :paramtype source: str + :keyword target: Target of the mount. For bind mounts this is the path in the container. + :paramtype target: str + :keyword consistency: Consistency of the volume. + :paramtype consistency: str + :keyword bind: Bind Options of the mount. + :paramtype bind: ~azure.mgmt.machinelearningservices.models.BindOptions + :keyword volume: Volume Options of the mount. + :paramtype volume: ~azure.mgmt.machinelearningservices.models.VolumeOptions + :keyword tmpfs: tmpfs option of the mount. + :paramtype tmpfs: ~azure.mgmt.machinelearningservices.models.TmpfsOptions + """ + super().__init__(**kwargs) + self.type = type + self.read_only = read_only + self.source = source + self.target = target + self.consistency = consistency + self.bind = bind + self.volume = volume + self.tmpfs = tmpfs + + +class VolumeOptions(_serialization.Model): + """VolumeOptions. + + :ivar nocopy: Indicate whether volume is nocopy. 
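# Illustrative usage sketch: a read-only bind mount expressed with the VolumeDefinition model
# defined above. The host and container paths are placeholders.
from azure.mgmt.machinelearningservices import models as ml_models

dataset_mount = ml_models.VolumeDefinition(
    type="bind",              # one of the known values: "bind", "volume", "tmpfs", "npipe"
    source="/data/datasets",  # host path of the bind mount
    target="/mnt/datasets",   # path inside the container
    read_only=True,
)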
+ :vartype nocopy: bool + """ + + _attribute_map = { + "nocopy": {"key": "nocopy", "type": "bool"}, + } + + def __init__(self, *, nocopy: Optional[bool] = None, **kwargs): + """ + :keyword nocopy: Indicate whether volume is nocopy. + :paramtype nocopy: bool + """ + super().__init__(**kwargs) + self.nocopy = nocopy + + +class Workspace(Resource): # pylint: disable=too-many-instance-attributes """An object that represents a machine learning workspace. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Specifies the resource ID. + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: Specifies the name of the resource. + :ivar name: The name of the resource. :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar location: Specifies the location of the resource. + :vartype location: str + :ivar tags: Contains resource tags defined as key/value pairs. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku :ivar workspace_id: The immutable id associated with this workspace. :vartype workspace_id: str - :param description: The description of this workspace. - :type description: str - :param friendly_name: The friendly name for this workspace. This name in mutable. - :type friendly_name: str - :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format. - :vartype creation_time: ~datetime.datetime - :param key_vault: ARM id of the key vault associated with this workspace. This cannot be - changed once the workspace has been created. - :type key_vault: str - :param application_insights: ARM id of the application insights associated with this workspace. - This cannot be changed once the workspace has been created. - :type application_insights: str - :param container_registry: ARM id of the container registry associated with this workspace. - This cannot be changed once the workspace has been created. - :type container_registry: str - :param storage_account: ARM id of the storage account associated with this workspace. This + :ivar description: The description of this workspace. + :vartype description: str + :ivar friendly_name: The friendly name for this workspace. This name in mutable. + :vartype friendly_name: str + :ivar key_vault: ARM id of the key vault associated with this workspace. This cannot be changed + once the workspace has been created. 
+ :vartype key_vault: str + :ivar application_insights: ARM id of the application insights associated with this workspace. + :vartype application_insights: str + :ivar container_registry: ARM id of the container registry associated with this workspace. + :vartype container_registry: str + :ivar storage_account: ARM id of the storage account associated with this workspace. This cannot be changed once the workspace has been created. - :type storage_account: str - :param discovery_url: Url for the discovery service to identify regional endpoints for machine + :vartype storage_account: str + :ivar discovery_url: Url for the discovery service to identify regional endpoints for machine learning experimentation services. - :type discovery_url: str + :vartype discovery_url: str :ivar provisioning_state: The current deployment state of workspace resource. The - provisioningState is to indicate states for resource provisioning. Possible values include: - "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + provisioningState is to indicate states for resource provisioning. Known values are: "Unknown", + "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled", and "SoftDeleted". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param encryption: The encryption settings of Azure ML workspace. - :type encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty - :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data + ~azure.mgmt.machinelearningservices.models.WorkspaceProvisioningState + :ivar encryption: The encryption settings of Azure ML workspace. + :vartype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty + :ivar hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service. - :type hbi_workspace: bool + :vartype hbi_workspace: bool :ivar service_provisioned_resource_group: The name of the managed resource group created by workspace RP in customer subscription if the workspace is CMK workspace. :vartype service_provisioned_resource_group: str :ivar private_link_count: Count of private connections in the workspace. :vartype private_link_count: int - :param image_build_compute: The compute name for image build. - :type image_build_compute: str - :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public - access when behind VNet. - :type allow_public_access_when_behind_vnet: bool + :ivar image_build_compute: The compute name for image build. + :vartype image_build_compute: str + :ivar allow_public_access_when_behind_vnet: The flag to indicate whether to allow public access + when behind VNet. + :vartype allow_public_access_when_behind_vnet: bool + :ivar public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled" and "Disabled". + :vartype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess :ivar private_endpoint_connections: The list of private endpoint connections in the workspace. :vartype private_endpoint_connections: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] - :param shared_private_link_resources: The list of shared private link resources in this + :ivar shared_private_link_resources: The list of shared private link resources in this workspace. 
- :type shared_private_link_resources: + :vartype shared_private_link_resources: list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] :ivar notebook_info: The notebook info of Azure ML workspace. :vartype notebook_info: ~azure.mgmt.machinelearningservices.models.NotebookResourceInfo + :ivar service_managed_resources_settings: The service managed resource settings. + :vartype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :ivar primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :vartype primary_user_assigned_identity: str + :ivar tenant_id: The tenant id associated with this workspace. + :vartype tenant_id: str + :ivar storage_hns_enabled: If the storage associated with the workspace has hierarchical + namespace(HNS) enabled. + :vartype storage_hns_enabled: bool + :ivar ml_flow_tracking_uri: The URI associated with this workspace that machine learning flow + must point at to set up tracking. + :vartype ml_flow_tracking_uri: str + :ivar v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided by + the v2 API. + :vartype v1_legacy_mode: bool + :ivar soft_deleted_at: The timestamp when the workspace was soft deleted. + :vartype soft_deleted_at: str + :ivar scheduled_purge_date: The timestamp when the soft deleted workspace is going to be + purged. + :vartype scheduled_purge_date: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'workspace_id': {'readonly': True}, - 'creation_time': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'service_provisioned_resource_group': {'readonly': True}, - 'private_link_count': {'readonly': True}, - 'private_endpoint_connections': {'readonly': True}, - 'notebook_info': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, - 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, - 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, - 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, - 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'}, - 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, - 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, - 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, - 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, - 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, - 'private_endpoint_connections': {'key': 
'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, - 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'}, - 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, - } - - def __init__( - self, - *, - identity: Optional["Identity"] = None, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "workspace_id": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "service_provisioned_resource_group": {"readonly": True}, + "private_link_count": {"readonly": True}, + "private_endpoint_connections": {"readonly": True}, + "notebook_info": {"readonly": True}, + "tenant_id": {"readonly": True}, + "storage_hns_enabled": {"readonly": True}, + "ml_flow_tracking_uri": {"readonly": True}, + "soft_deleted_at": {"readonly": True}, + "scheduled_purge_date": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "Sku"}, + "workspace_id": {"key": "properties.workspaceId", "type": "str"}, + "description": {"key": "properties.description", "type": "str"}, + "friendly_name": {"key": "properties.friendlyName", "type": "str"}, + "key_vault": {"key": "properties.keyVault", "type": "str"}, + "application_insights": {"key": "properties.applicationInsights", "type": "str"}, + "container_registry": {"key": "properties.containerRegistry", "type": "str"}, + "storage_account": {"key": "properties.storageAccount", "type": "str"}, + "discovery_url": {"key": "properties.discoveryUrl", "type": "str"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "encryption": {"key": "properties.encryption", "type": "EncryptionProperty"}, + "hbi_workspace": {"key": "properties.hbiWorkspace", "type": "bool"}, + "service_provisioned_resource_group": {"key": "properties.serviceProvisionedResourceGroup", "type": "str"}, + "private_link_count": {"key": "properties.privateLinkCount", "type": "int"}, + "image_build_compute": {"key": "properties.imageBuildCompute", "type": "str"}, + "allow_public_access_when_behind_vnet": {"key": "properties.allowPublicAccessWhenBehindVnet", "type": "bool"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "private_endpoint_connections": { + "key": "properties.privateEndpointConnections", + "type": "[PrivateEndpointConnection]", + }, + "shared_private_link_resources": { + "key": "properties.sharedPrivateLinkResources", + "type": "[SharedPrivateLinkResource]", + }, + "notebook_info": {"key": "properties.notebookInfo", "type": "NotebookResourceInfo"}, + "service_managed_resources_settings": { + "key": "properties.serviceManagedResourcesSettings", + "type": "ServiceManagedResourcesSettings", + }, + "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, + "tenant_id": {"key": "properties.tenantId", "type": "str"}, + "storage_hns_enabled": {"key": "properties.storageHnsEnabled", "type": "bool"}, + "ml_flow_tracking_uri": {"key": "properties.mlFlowTrackingUri", "type": "str"}, + "v1_legacy_mode": {"key": "properties.v1LegacyMode", 
"type": "bool"}, + "soft_deleted_at": {"key": "properties.softDeletedAt", "type": "str"}, + "scheduled_purge_date": {"key": "properties.scheduledPurgeDate", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + identity: Optional["_models.ManagedServiceIdentity"] = None, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, + sku: Optional["_models.Sku"] = None, description: Optional[str] = None, friendly_name: Optional[str] = None, key_vault: Optional[str] = None, @@ -3725,18 +24572,80 @@ def __init__( container_registry: Optional[str] = None, storage_account: Optional[str] = None, discovery_url: Optional[str] = None, - encryption: Optional["EncryptionProperty"] = None, - hbi_workspace: Optional[bool] = False, + encryption: Optional["_models.EncryptionProperty"] = None, + hbi_workspace: bool = False, image_build_compute: Optional[str] = None, - allow_public_access_when_behind_vnet: Optional[bool] = False, - shared_private_link_resources: Optional[List["SharedPrivateLinkResource"]] = None, + allow_public_access_when_behind_vnet: bool = False, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + shared_private_link_resources: Optional[List["_models.SharedPrivateLinkResource"]] = None, + service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, + primary_user_assigned_identity: Optional[str] = None, + v1_legacy_mode: bool = False, **kwargs ): - super(Workspace, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs) + """ + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword location: Specifies the location of the resource. + :paramtype location: str + :keyword tags: Contains resource tags defined as key/value pairs. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword description: The description of this workspace. + :paramtype description: str + :keyword friendly_name: The friendly name for this workspace. This name in mutable. + :paramtype friendly_name: str + :keyword key_vault: ARM id of the key vault associated with this workspace. This cannot be + changed once the workspace has been created. + :paramtype key_vault: str + :keyword application_insights: ARM id of the application insights associated with this + workspace. + :paramtype application_insights: str + :keyword container_registry: ARM id of the container registry associated with this workspace. + :paramtype container_registry: str + :keyword storage_account: ARM id of the storage account associated with this workspace. This + cannot be changed once the workspace has been created. + :paramtype storage_account: str + :keyword discovery_url: Url for the discovery service to identify regional endpoints for + machine learning experimentation services. + :paramtype discovery_url: str + :keyword encryption: The encryption settings of Azure ML workspace. + :paramtype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty + :keyword hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data + collected by the service. + :paramtype hbi_workspace: bool + :keyword image_build_compute: The compute name for image build. 
+ :paramtype image_build_compute: str + :keyword allow_public_access_when_behind_vnet: The flag to indicate whether to allow public + access when behind VNet. + :paramtype allow_public_access_when_behind_vnet: bool + :keyword public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled" and "Disabled". + :paramtype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess + :keyword shared_private_link_resources: The list of shared private link resources in this + workspace. + :paramtype shared_private_link_resources: + list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] + :keyword service_managed_resources_settings: The service managed resource settings. + :paramtype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :keyword primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :paramtype primary_user_assigned_identity: str + :keyword v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided + by the v2 API. + :paramtype v1_legacy_mode: bool + """ + super().__init__(**kwargs) + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku self.workspace_id = None self.description = description self.friendly_name = friendly_name - self.creation_time = None self.key_vault = key_vault self.application_insights = application_insights self.container_registry = container_registry @@ -3749,197 +24658,398 @@ def __init__( self.private_link_count = None self.image_build_compute = image_build_compute self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet + self.public_network_access = public_network_access self.private_endpoint_connections = None self.shared_private_link_resources = shared_private_link_resources self.notebook_info = None + self.service_managed_resources_settings = service_managed_resources_settings + self.primary_user_assigned_identity = primary_user_assigned_identity + self.tenant_id = None + self.storage_hns_enabled = None + self.ml_flow_tracking_uri = None + self.v1_legacy_mode = v1_legacy_mode + self.soft_deleted_at = None + self.scheduled_purge_date = None + + +class WorkspaceConnectionAccessKey(_serialization.Model): + """WorkspaceConnectionAccessKey. + + :ivar access_key_id: + :vartype access_key_id: str + :ivar secret_access_key: + :vartype secret_access_key: str + """ + + _attribute_map = { + "access_key_id": {"key": "accessKeyId", "type": "str"}, + "secret_access_key": {"key": "secretAccessKey", "type": "str"}, + } + + def __init__(self, *, access_key_id: Optional[str] = None, secret_access_key: Optional[str] = None, **kwargs): + """ + :keyword access_key_id: + :paramtype access_key_id: str + :keyword secret_access_key: + :paramtype secret_access_key: str + """ + super().__init__(**kwargs) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + + +class WorkspaceConnectionManagedIdentity(_serialization.Model): + """WorkspaceConnectionManagedIdentity. 
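# Illustrative usage sketch: a minimal Workspace payload built with the updated model above.
# ManagedServiceIdentity is defined elsewhere in this module; its "SystemAssigned" type value and
# the dependent-resource ARM ids below are assumptions/placeholders, not values taken from this file.
from azure.mgmt.machinelearningservices import models as ml_models

workspace = ml_models.Workspace(
    location="eastus",
    identity=ml_models.ManagedServiceIdentity(type="SystemAssigned"),
    friendly_name="example-workspace",
    key_vault="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<kv>",
    storage_account="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<sa>",
    application_insights="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Insights/components/<ai>",
    public_network_access="Enabled",
    v1_legacy_mode=False,
)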
+ + :ivar resource_id: + :vartype resource_id: str + :ivar client_id: + :vartype client_id: str + """ + + _attribute_map = { + "resource_id": {"key": "resourceId", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + } + + def __init__(self, *, resource_id: Optional[str] = None, client_id: Optional[str] = None, **kwargs): + """ + :keyword resource_id: + :paramtype resource_id: str + :keyword client_id: + :paramtype client_id: str + """ + super().__init__(**kwargs) + self.resource_id = resource_id + self.client_id = client_id + + +class WorkspaceConnectionPersonalAccessToken(_serialization.Model): + """WorkspaceConnectionPersonalAccessToken. + + :ivar pat: + :vartype pat: str + """ + + _attribute_map = { + "pat": {"key": "pat", "type": "str"}, + } + def __init__(self, *, pat: Optional[str] = None, **kwargs): + """ + :keyword pat: + :paramtype pat: str + """ + super().__init__(**kwargs) + self.pat = pat -class WorkspaceConnection(msrest.serialization.Model): - """Workspace connection. + +class WorkspaceConnectionPropertiesV2BasicResource(Resource): + """WorkspaceConnectionPropertiesV2BasicResource. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: ResourceId of the workspace connection. + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: Friendly name of the workspace connection. + :ivar name: The name of the resource. :vartype name: str - :ivar type: Resource type of workspace connection. + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". :vartype type: str - :param category: Category of the workspace connection. - :type category: str - :param target: Target of the workspace connection. - :type target: str - :param auth_type: Authorization type of the workspace connection. - :type auth_type: str - :param value: Value details of the workspace connection. - :type value: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'category': {'key': 'properties.category', 'type': 'str'}, - 'target': {'key': 'properties.target', 'type': 'str'}, - 'auth_type': {'key': 'properties.authType', 'type': 'str'}, - 'value': {'key': 'properties.value', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "WorkspaceConnectionPropertiesV2"}, } - def __init__( - self, - *, - category: Optional[str] = None, - target: Optional[str] = None, - auth_type: Optional[str] = None, - value: Optional[str] = None, - **kwargs - ): - super(WorkspaceConnection, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.category = category - self.target = target - self.auth_type = auth_type - self.value = value + def __init__(self, *, properties: "_models.WorkspaceConnectionPropertiesV2", **kwargs): + """ + :keyword properties: Required. + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 + """ + super().__init__(**kwargs) + self.properties = properties -class WorkspaceConnectionDto(msrest.serialization.Model): - """object used for creating workspace connection. +class WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult(_serialization.Model): + """WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult. + + Variables are only populated by the server, and will be ignored when sending a request. - :param name: Friendly name of the workspace connection. - :type name: str - :param category: Category of the workspace connection. - :type category: str - :param target: Target of the workspace connection. - :type target: str - :param auth_type: Authorization type of the workspace connection. - :type auth_type: str - :param value: Value details of the workspace connection. 
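# Illustrative usage sketch: wrapping username/password connection credentials in the new
# *PropertiesV2 models shown above. The category and target values are placeholders, and
# WorkspaceConnectionUsernamePassword is defined later in this file.
from azure.mgmt.machinelearningservices import models as ml_models

connection = ml_models.WorkspaceConnectionPropertiesV2BasicResource(
    properties=ml_models.UsernamePasswordAuthTypeWorkspaceConnectionProperties(
        category="ContainerRegistry",
        target="https://example.azurecr.io",
        credentials=ml_models.WorkspaceConnectionUsernamePassword(
            username="svc-user",
            password="<placeholder-password>",
        ),
    ),
)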
- :type value: str + :ivar value: + :vartype value: + list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + :ivar next_link: + :vartype next_link: str """ + _validation = { + "next_link": {"readonly": True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'category': {'key': 'properties.category', 'type': 'str'}, - 'target': {'key': 'properties.target', 'type': 'str'}, - 'auth_type': {'key': 'properties.authType', 'type': 'str'}, - 'value': {'key': 'properties.value', 'type': 'str'}, + "value": {"key": "value", "type": "[WorkspaceConnectionPropertiesV2BasicResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( - self, - *, - name: Optional[str] = None, - category: Optional[str] = None, - target: Optional[str] = None, - auth_type: Optional[str] = None, - value: Optional[str] = None, - **kwargs + self, *, value: Optional[List["_models.WorkspaceConnectionPropertiesV2BasicResource"]] = None, **kwargs ): - super(WorkspaceConnectionDto, self).__init__(**kwargs) - self.name = name - self.category = category - self.target = target - self.auth_type = auth_type + """ + :keyword value: + :paramtype value: + list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + """ + super().__init__(**kwargs) self.value = value + self.next_link = None -class WorkspaceListResult(msrest.serialization.Model): - """The result of a request to list machine learning workspaces. +class WorkspaceConnectionServicePrincipal(_serialization.Model): + """WorkspaceConnectionServicePrincipal. - :param value: The list of machine learning workspaces. Since this list may be incomplete, the - nextLink field should be used to request the next list of machine learning workspaces. - :type value: list[~azure.mgmt.machinelearningservices.models.Workspace] - :param next_link: The URI that can be used to request the next list of machine learning - workspaces. - :type next_link: str + :ivar client_id: + :vartype client_id: str + :ivar client_secret: + :vartype client_secret: str + :ivar tenant_id: + :vartype tenant_id: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[Workspace]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "client_id": {"key": "clientId", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } def __init__( self, *, - value: Optional[List["Workspace"]] = None, - next_link: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + tenant_id: Optional[str] = None, **kwargs ): - super(WorkspaceListResult, self).__init__(**kwargs) - self.value = value - self.next_link = next_link + """ + :keyword client_id: + :paramtype client_id: str + :keyword client_secret: + :paramtype client_secret: str + :keyword tenant_id: + :paramtype tenant_id: str + """ + super().__init__(**kwargs) + self.client_id = client_id + self.client_secret = client_secret + self.tenant_id = tenant_id -class WorkspaceSku(msrest.serialization.Model): - """AML workspace sku information. +class WorkspaceConnectionSharedAccessSignature(_serialization.Model): + """WorkspaceConnectionSharedAccessSignature. - Variables are only populated by the server, and will be ignored when sending a request. + :ivar sas: + :vartype sas: str + """ - :ivar resource_type: - :vartype resource_type: str - :ivar skus: The list of workspace sku settings. 
- :vartype skus: list[~azure.mgmt.machinelearningservices.models.SkuSettings] + _attribute_map = { + "sas": {"key": "sas", "type": "str"}, + } + + def __init__(self, *, sas: Optional[str] = None, **kwargs): + """ + :keyword sas: + :paramtype sas: str + """ + super().__init__(**kwargs) + self.sas = sas + + +class WorkspaceConnectionUsernamePassword(_serialization.Model): + """WorkspaceConnectionUsernamePassword. + + :ivar username: + :vartype username: str + :ivar password: + :vartype password: str """ - _validation = { - 'resource_type': {'readonly': True}, - 'skus': {'readonly': True}, + _attribute_map = { + "username": {"key": "username", "type": "str"}, + "password": {"key": "password", "type": "str"}, } + def __init__(self, *, username: Optional[str] = None, password: Optional[str] = None, **kwargs): + """ + :keyword username: + :paramtype username: str + :keyword password: + :paramtype password: str + """ + super().__init__(**kwargs) + self.username = username + self.password = password + + +class WorkspaceListResult(_serialization.Model): + """The result of a request to list machine learning workspaces. + + :ivar value: The list of machine learning workspaces. Since this list may be incomplete, the + nextLink field should be used to request the next list of machine learning workspaces. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Workspace] + :ivar next_link: The URI that can be used to request the next list of machine learning + workspaces. + :vartype next_link: str + """ + _attribute_map = { - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'skus': {'key': 'skus', 'type': '[SkuSettings]'}, + "value": {"key": "value", "type": "[Workspace]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(WorkspaceSku, self).__init__(**kwargs) - self.resource_type = None - self.skus = None + def __init__(self, *, value: Optional[List["_models.Workspace"]] = None, next_link: Optional[str] = None, **kwargs): + """ + :keyword value: The list of machine learning workspaces. Since this list may be incomplete, the + nextLink field should be used to request the next list of machine learning workspaces. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.Workspace] + :keyword next_link: The URI that can be used to request the next list of machine learning + workspaces. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link -class WorkspaceUpdateParameters(msrest.serialization.Model): +class WorkspaceUpdateParameters(_serialization.Model): # pylint: disable=too-many-instance-attributes """The parameters for updating a machine learning workspace. - :param tags: A set of tags. The resource tags for the machine learning workspace. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :param description: The description of this workspace. - :type description: str - :param friendly_name: The friendly name for this workspace. - :type friendly_name: str + :ivar tags: The resource tags for the machine learning workspace. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar description: The description of this workspace. 
+ :vartype description: str + :ivar friendly_name: The friendly name for this workspace. + :vartype friendly_name: str + :ivar image_build_compute: The compute name for image build. + :vartype image_build_compute: str + :ivar service_managed_resources_settings: The service managed resource settings. + :vartype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :ivar primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :vartype primary_user_assigned_identity: str + :ivar public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled" and "Disabled". + :vartype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess + :ivar application_insights: ARM id of the application insights associated with this workspace. + :vartype application_insights: str + :ivar container_registry: ARM id of the container registry associated with this workspace. + :vartype container_registry: str + :ivar encryption: The encryption settings of the workspace. + :vartype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionUpdateProperties """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "Sku"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "description": {"key": "properties.description", "type": "str"}, + "friendly_name": {"key": "properties.friendlyName", "type": "str"}, + "image_build_compute": {"key": "properties.imageBuildCompute", "type": "str"}, + "service_managed_resources_settings": { + "key": "properties.serviceManagedResourcesSettings", + "type": "ServiceManagedResourcesSettings", + }, + "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "application_insights": {"key": "properties.applicationInsights", "type": "str"}, + "container_registry": {"key": "properties.containerRegistry", "type": "str"}, + "encryption": {"key": "properties.encryption", "type": "EncryptionUpdateProperties"}, } def __init__( self, *, tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, + sku: Optional["_models.Sku"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, description: Optional[str] = None, friendly_name: Optional[str] = None, + image_build_compute: Optional[str] = None, + service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, + primary_user_assigned_identity: Optional[str] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + application_insights: Optional[str] = None, + container_registry: Optional[str] = None, + encryption: Optional["_models.EncryptionUpdateProperties"] = None, **kwargs ): - super(WorkspaceUpdateParameters, self).__init__(**kwargs) + """ + :keyword tags: The resource tags for the machine learning workspace. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword identity: The identity of the resource. 
+ :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword description: The description of this workspace. + :paramtype description: str + :keyword friendly_name: The friendly name for this workspace. + :paramtype friendly_name: str + :keyword image_build_compute: The compute name for image build. + :paramtype image_build_compute: str + :keyword service_managed_resources_settings: The service managed resource settings. + :paramtype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :keyword primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :paramtype primary_user_assigned_identity: str + :keyword public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled" and "Disabled". + :paramtype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess + :keyword application_insights: ARM id of the application insights associated with this + workspace. + :paramtype application_insights: str + :keyword container_registry: ARM id of the container registry associated with this workspace. + :paramtype container_registry: str + :keyword encryption: The encryption settings of the workspace. + :paramtype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionUpdateProperties + """ + super().__init__(**kwargs) self.tags = tags self.sku = sku + self.identity = identity self.description = description self.friendly_name = friendly_name + self.image_build_compute = image_build_compute + self.service_managed_resources_settings = service_managed_resources_settings + self.primary_user_assigned_identity = primary_user_assigned_identity + self.public_network_access = public_network_access + self.application_insights = application_insights + self.container_registry = container_registry + self.encryption = encryption diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py index 516999b100d8..261f5da876a3 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py @@ -8,28 +8,84 @@ from ._operations import Operations from ._workspaces_operations import WorkspacesOperations -from ._workspace_features_operations import WorkspaceFeaturesOperations -from ._notebooks_operations import NotebooksOperations from ._usages_operations import UsagesOperations from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations from ._quotas_operations import QuotasOperations -from ._workspace_connections_operations import WorkspaceConnectionsOperations -from ._machine_learning_compute_operations import MachineLearningComputeOperations -from ._azure_machine_learning_workspaces_operations import AzureMachineLearningWorkspacesOperationsMixin +from ._compute_operations import ComputeOperations from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._workspace_connections_operations import WorkspaceConnectionsOperations +from ._registry_code_containers_operations import RegistryCodeContainersOperations +from ._registry_code_versions_operations import RegistryCodeVersionsOperations +from ._registry_component_containers_operations import RegistryComponentContainersOperations +from ._registry_component_versions_operations import RegistryComponentVersionsOperations +from ._registry_environment_containers_operations import RegistryEnvironmentContainersOperations +from ._registry_environment_versions_operations import RegistryEnvironmentVersionsOperations +from ._registry_model_containers_operations import RegistryModelContainersOperations +from ._registry_model_versions_operations import RegistryModelVersionsOperations +from ._batch_endpoints_operations import BatchEndpointsOperations +from ._batch_deployments_operations import BatchDeploymentsOperations +from ._code_containers_operations import CodeContainersOperations +from ._code_versions_operations import CodeVersionsOperations +from ._component_containers_operations import ComponentContainersOperations +from ._component_versions_operations import ComponentVersionsOperations +from ._data_containers_operations import DataContainersOperations +from ._data_versions_operations import DataVersionsOperations +from ._datastores_operations import DatastoresOperations +from ._environment_containers_operations import EnvironmentContainersOperations +from ._environment_versions_operations import EnvironmentVersionsOperations +from ._jobs_operations import JobsOperations +from ._labeling_jobs_operations import LabelingJobsOperations +from ._model_containers_operations import ModelContainersOperations +from ._model_versions_operations import ModelVersionsOperations +from ._online_endpoints_operations import OnlineEndpointsOperations +from ._online_deployments_operations import 
OnlineDeploymentsOperations +from ._schedules_operations import SchedulesOperations +from ._workspace_features_operations import WorkspaceFeaturesOperations +from ._registries_operations import RegistriesOperations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'Operations', - 'WorkspacesOperations', - 'WorkspaceFeaturesOperations', - 'NotebooksOperations', - 'UsagesOperations', - 'VirtualMachineSizesOperations', - 'QuotasOperations', - 'WorkspaceConnectionsOperations', - 'MachineLearningComputeOperations', - 'AzureMachineLearningWorkspacesOperationsMixin', - 'PrivateEndpointConnectionsOperations', - 'PrivateLinkResourcesOperations', + "Operations", + "WorkspacesOperations", + "UsagesOperations", + "VirtualMachineSizesOperations", + "QuotasOperations", + "ComputeOperations", + "PrivateEndpointConnectionsOperations", + "PrivateLinkResourcesOperations", + "WorkspaceConnectionsOperations", + "RegistryCodeContainersOperations", + "RegistryCodeVersionsOperations", + "RegistryComponentContainersOperations", + "RegistryComponentVersionsOperations", + "RegistryEnvironmentContainersOperations", + "RegistryEnvironmentVersionsOperations", + "RegistryModelContainersOperations", + "RegistryModelVersionsOperations", + "BatchEndpointsOperations", + "BatchDeploymentsOperations", + "CodeContainersOperations", + "CodeVersionsOperations", + "ComponentContainersOperations", + "ComponentVersionsOperations", + "DataContainersOperations", + "DataVersionsOperations", + "DatastoresOperations", + "EnvironmentContainersOperations", + "EnvironmentVersionsOperations", + "JobsOperations", + "LabelingJobsOperations", + "ModelContainersOperations", + "ModelVersionsOperations", + "OnlineEndpointsOperations", + "OnlineDeploymentsOperations", + "SchedulesOperations", + "WorkspaceFeaturesOperations", + "RegistriesOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py deleted file mode 100644 index b39b1e769d9a..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models as _models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class AzureMachineLearningWorkspacesOperationsMixin(object): - - def list_skus( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.SkuListResult"] - """Lists all skus with associated features. - - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either SkuListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.SkuListResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.SkuListResult"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_skus.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('SkuListResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py new file mode 100644 index 000000000000..b6cb8edee913 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py @@ -0,0 +1,1106 @@ +# pylint: disable=too-many-lines +# 
coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + workspace_name: str, + 
endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) 
# type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "deploymentName": _SERIALIZER.url( + "deployment_name", deployment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "deploymentName": _SERIALIZER.url( + "deployment_name", deployment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class BatchDeploymentsOperations: + """ + .. 
warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`batch_deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.BatchDeployment"]: + """Lists Batch inference deployments in the workspace. + + Lists Batch inference deployments in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Endpoint name. Required. + :type endpoint_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Top of list. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchDeployment or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeploymentTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = 
self._deserialize("BatchDeploymentTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: 
str, deployment_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete Batch Inference deployment (asynchronous). + + Delete Batch Inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference deployment identifier. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> _models.BatchDeployment: + """Gets a batch inference deployment by id. + + Gets a batch inference deployment by id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch deployments. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchDeployment or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeployment] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BatchDeployment", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, IO], + **kwargs: Any + ) -> Optional[_models.BatchDeployment]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.BatchDeployment]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, 
"PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("BatchDeployment", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, IO], + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Is either a model type or a IO type. + Required. 
+ :type body: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeployment] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BatchDeployment", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.BatchDeployment, IO], + **kwargs: Any + ) -> _models.BatchDeployment: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + 
error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeployment] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "BatchDeployment") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("BatchDeployment", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("BatchDeployment", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.BatchDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. 
+ :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.BatchDeployment, IO], + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchDeployment] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BatchDeployment", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} 
# type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py new file mode 100644 index 000000000000..db7023432d51 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py @@ -0,0 +1,1127 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + 
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: 
Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_keys_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class BatchEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`batch_endpoints` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.BatchEndpoint"]: + """Lists Batch inference endpoint in the workspace. + + Lists Batch inference endpoint in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchEndpoint or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpointTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("BatchEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete Batch Inference Endpoint (asynchronous). + + Delete Batch Inference Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference Endpoint name. Required. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.BatchEndpoint: + """Gets a batch inference endpoint by name. + + Gets a batch inference endpoint by name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch Endpoint. Required. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchEndpoint or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpoint] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> Optional[_models.BatchEndpoint]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.BatchEndpoint]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithIdentity") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Is either a model type or a IO + type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpoint] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.BatchEndpoint, IO], + **kwargs: Any + ) -> _models.BatchEndpoint: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpoint] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "BatchEndpoint") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + 
workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.BatchEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.BatchEndpoint, IO], + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.BatchEndpoint] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("BatchEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def list_keys( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: + """Lists batch Inference Endpoint keys. + + Lists batch Inference Endpoint keys. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference Endpoint name. Required. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EndpointAuthKeys] + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py new file mode 100644 index 000000000000..725847063443 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py @@ -0,0 +1,571 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, workspace_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", 
workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class 
CodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`code_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.CodeContainer"]: + """List containers. + + List containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.CodeContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.CodeContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.CodeContainer, IO], + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py new file mode 100644 index 000000000000..884a4d870242 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py @@ -0,0 +1,620 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class CodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`code_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.CodeVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeVersion or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return 
deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> 
_models.CodeVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.CodeVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.CodeVersion, IO], + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py new file mode 100644 index 000000000000..631d3fd4c72d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py @@ -0,0 +1,591 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}", + ) # pylint: disable=line-too-long + 
path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + 
# Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`component_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ComponentContainer"]: + """List component containers. + + List component containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentContainer or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: Container name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ComponentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ComponentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ComponentContainer, IO], + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not 
in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py new file mode 100644 index 000000000000..d5b4fa8c0bb5 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py @@ -0,0 +1,629 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct 
parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, 
headers=_headers, **kwargs) + + +class ComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`component_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ComponentVersion"]: + """List component versions. + + List component versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Component name. Required. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
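A short sketch of how this paged list is typically consumed, assuming `client` is the AzureMachineLearningServices instance from the earlier sketch and the names are placeholders; the $orderBy expression is illustrative. The returned ItemPaged iterator follows the nextLink continuation automatically.

    versions = client.component_versions.list(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="my_component",
        order_by="createdtime desc",  # sent as $orderBy; illustrative value
        top=10,                       # sent as $top
        list_view_type="All",         # include both active and archived versions
    )
    for version in versions:
        print(version.name, version.id)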
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentVersion or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
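The `cls` keyword mentioned throughout these docstrings receives the raw pipeline response, the deserialized body (None for delete), and a dict of response headers, so it is a convenient way to observe whether the service answered 200 or 204. A sketch, assuming the same placeholder `client` as in the earlier sketches:

    def _return_status(pipeline_response, deserialized, response_headers):
        # `deserialized` is None for delete; surface the HTTP status instead.
        return pipeline_response.http_response.status_code

    status = client.component_versions.delete(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="my_component",
        version="1",
        cls=_return_status,
    )
    print(status)  # 200 or 204 on success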
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.ComponentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ComponentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.ComponentVersion, IO], + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
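As the body check in the implementation below shows (`isinstance(body, (IO, bytes))`), a pre-serialized payload is forwarded as-is instead of being re-serialized from a model. A sketch of that path, assuming the placeholder `client` from the earlier sketches and a local component_version.json file that already contains a valid ComponentVersion resource body:

    with open("component_version.json", "rb") as handle:
        payload = handle.read()  # raw bytes are passed through unmodified

    client.component_versions.create_or_update(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="my_component",
        version="2",
        body=payload,
        content_type="application/json",
    )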
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py new file mode 100644 index 000000000000..11faa8b60e43 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py @@ -0,0 +1,2044 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, workspace_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": 
_SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + workspace_name: str, + compute_name: str, + subscription_id: str, + *, + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + _params["underlyingResourceAction"] = _SERIALIZER.query( + "underlying_resource_action", underlying_resource_action, "str" + ) + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_custom_services_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_nodes_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_keys_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_start_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_stop_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_restart_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + 
"resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_idle_shutdown_setting_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class ComputeOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`compute` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.ComputeResource"]: + """Gets computes in specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. 
Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComputeResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PaginatedComputeResourcesList] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("PaginatedComputeResourcesList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.ComputeResource: + """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are + not returned - use 'keys' nested resource to get them. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
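A brief sketch of the compute read operations, assuming the same placeholder `client`; `list` returns an ItemPaged that fetches subsequent pages lazily, and, as noted above, `get` never returns secrets (those come from the dedicated keys operations).

    for compute in client.compute.list(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
    ):
        print(compute.name)

    cluster = client.compute.get(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        compute_name="cpu-cluster",
    )
    print(cluster.id)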
+ :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComputeResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ComputeResource, IO], + **kwargs: Any + ) -> _models.ComputeResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ComputeResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ComputeResource", pipeline_response) + + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComputeResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ComputeResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
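# --- Illustrative LRO sketch (editorial aside, not part of the generated patch) ---
# `begin_create_or_update` returns an LROPoller; `.result()` blocks until the
# service finishes and yields the ComputeResource. The AmlCompute/ScaleSettings
# model shapes are assumptions based on recent API versions; `client` is the
# assumed service client from the earlier sketch.
from azure.mgmt.machinelearningservices.models import (  # model names assumed
    AmlCompute,
    AmlComputeProperties,
    ComputeResource,
    ScaleSettings,
)

parameters = ComputeResource(
    location="westus2",
    properties=AmlCompute(
        properties=AmlComputeProperties(
            vm_size="STANDARD_DS3_V2",
            scale_settings=ScaleSettings(min_node_count=0, max_node_count=4),
        )
    ),
)

poller = client.compute.begin_create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    compute_name="cpu-cluster",
    parameters=parameters,
)
created = poller.result()  # ARMPolling follows the Azure-AsyncOperation header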
+ :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ComputeResource, IO], + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Is either a model type or + a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ComputeResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ClusterUpdateParameters, IO], + **kwargs: Any + ) -> _models.ComputeResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + + content_type = content_type or "application/json" + _json = None + _content 
= None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ClusterUpdateParameters") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComputeResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ClusterUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
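# --- Illustrative sketch: resizing a cluster with begin_update (editorial aside) ---
# The operation accepts either a ClusterUpdateParameters model or a raw IO payload.
# The exact nesting of scale settings inside ClusterUpdateParameters differs
# between API versions, so the construction below is an assumption; the call and
# poller usage match the generated method. `client` is the assumed client from
# the earlier sketch.
from azure.mgmt.machinelearningservices.models import (  # model names assumed
    ClusterUpdateParameters,
    ScaleSettings,
    ScaleSettingsInformation,
)

update = ClusterUpdateParameters(
    properties=ScaleSettingsInformation(  # wrapper assumed for this API version
        scale_settings=ScaleSettings(min_node_count=0, max_node_count=8)
    )
)

resized = client.compute.begin_update(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    compute_name="cpu-cluster",
    parameters=update,
).result()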
+ :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ClusterUpdateParameters, IO], + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Is either a model type or a IO + type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeResource] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ComputeResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + underlying_resource_action=underlying_resource_action, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], + **kwargs: Any + ) -> LROPoller[None]: + """Deletes specified Machine Learning compute. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the + underlying compute from workspace if 'Detach'. Known values are: "Delete" and "Detach". + Required. + :type underlying_resource_action: str or + ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
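# --- Illustrative sketch: detaching vs. deleting a compute (editorial aside) ---
# `underlying_resource_action` is required: "Delete" removes the underlying
# resource, "Detach" only removes it from the workspace (values per the docstring
# above). Resource names are placeholders; `client` is the assumed client from
# the earlier sketch.
poller = client.compute.begin_delete(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    compute_name="cpu-cluster",
    underlying_resource_action="Detach",
)
poller.wait()  # LROPoller[None]: there is no response body to read back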
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + underlying_resource_action=underlying_resource_action, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @overload + def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: List[_models.CustomService], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Required. + :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. 
The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Required. + :type custom_services: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: Union[List[_models.CustomService], IO], + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Is either a list type or a IO type. + Required. + :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
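# --- Illustrative sketch: replacing the custom services list (editorial aside) ---
# The call overwrites the whole list, so a caller sends every service it wants to
# keep, not a delta. CustomService construction is omitted because its fields are
# model-version specific; `build_desired_custom_services` is a hypothetical helper
# returning list[CustomService], and `client` is the assumed client from the
# earlier sketch.
services = build_desired_custom_services()  # hypothetical helper

client.compute.update_custom_services(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    compute_name="my-compute-instance",
    custom_services=services,
)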
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(custom_services, (IO, bytes)): + _content = custom_services + else: + _json = self._serialize.body(custom_services, "[CustomService]") + + request = build_update_custom_services_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update_custom_services.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_custom_services.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices"} # type: ignore + + @distributed_trace + def list_nodes( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> Iterable["_models.AmlComputeNodeInformation"]: + """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AmlComputeNodeInformation or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AmlComputeNodesInformation] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_nodes_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_nodes.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("AmlComputeNodesInformation", pipeline_response) + list_of_elem = deserialized.nodes + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_nodes.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes"} # type: ignore + + @distributed_trace + def list_keys( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.ComputeSecrets: + """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
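# --- Illustrative sketch: node details and compute secrets (editorial aside) ---
# `list_nodes` pages through AmlComputeNodeInformation entries and `list_keys`
# returns the ComputeSecrets that `get` deliberately omits. The node attribute
# names are assumptions; `client` is the assumed client from the earlier sketch.
for node in client.compute.list_nodes(
    resource_group_name="my-rg", workspace_name="my-ws", compute_name="cpu-cluster"
):
    print(node.node_id, node.node_state)  # attribute names assumed

secrets = client.compute.list_keys(
    resource_group_name="my-rg", workspace_name="my-ws", compute_name="my-aks"
)
print(type(secrets).__name__)  # concrete subtype depends on the compute type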
+ :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeSecrets or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComputeSecrets] + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComputeSecrets", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys"} # type: ignore + + def _start_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_start_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._start_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + + @distributed_trace + def begin_start( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Posts a start action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + + def _stop_initial( # pylint: 
disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_stop_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._stop_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + + @distributed_trace + def begin_stop( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Posts a stop action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
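# --- Illustrative sketch: start, stop, and restart lifecycle calls (editorial aside) ---
# All three operations return LROPoller[None] and their initial calls accept only
# a 202 response, so success is observed by waiting on the poller rather than by
# reading a body. Names are placeholders; `client` is the assumed client from the
# earlier sketch.
client.compute.begin_stop(
    resource_group_name="my-rg", workspace_name="my-ws", compute_name="my-ci"
).wait()

start_poller = client.compute.begin_start(
    resource_group_name="my-rg", workspace_name="my-ws", compute_name="my-ci"
)
start_poller.wait()
print(start_poller.status())

client.compute.begin_restart(
    resource_group_name="my-rg", workspace_name="my-ws", compute_name="my-ci"
).wait()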
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._stop_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + + def _restart_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_restart_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._restart_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + 
_restart_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + + @distributed_trace + def begin_restart( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Posts a restart action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._restart_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_restart.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + + @overload + def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.IdleShutdownSetting, + *, + content_type: str = "application/json", + 
**kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.IdleShutdownSetting, IO], + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Is either a model type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
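# --- Illustrative sketch: configuring idle shutdown (editorial aside) ---
# A synchronous call with no poller. The IdleShutdownSetting field name and the
# ISO 8601 duration format are assumptions; the call signature matches the
# generated method, and `client` is the assumed client from the earlier sketch.
from azure.mgmt.machinelearningservices.models import IdleShutdownSetting

client.compute.update_idle_shutdown_setting(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    compute_name="my-ci",
    parameters=IdleShutdownSetting(idle_time_before_shutdown="PT30M"),  # field/format assumed
)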
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "IdleShutdownSetting") + + request = build_update_idle_shutdown_setting_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update_idle_shutdown_setting.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_idle_shutdown_setting.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py new file mode 100644 index 000000000000..1e01eea532b2 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py @@ -0,0 +1,588 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", 
min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", 
content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class DataContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`data_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.DataContainer"]: + """List data containers. + + List data containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DataContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: 
ignore + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.DataContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DataContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.DataContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.DataContainer, IO], + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("DataContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("DataContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py new file mode 100644 index 000000000000..9eb45f8041ab --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py @@ -0,0 +1,641 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["$tags"] = _SERIALIZER.query("tags", tags, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", 
"2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class DataVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`data_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.DataVersionBase"]: + """List data versions in the data container. + + List data versions in the data container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Data container's name. Required. + :type name: str + :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. Default + value is None. + :type order_by: str + :param top: Top count of results, top count cannot be greater than the page size. + If topCount > page size, results with default page size count + will be returned. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataVersionBase or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataVersionBaseResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DataVersionBaseResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.DataVersionBase: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataVersionBase] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DataVersionBase", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.DataVersionBase, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.DataVersionBase, IO], + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DataVersionBase] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataVersionBase") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("DataVersionBase", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("DataVersionBase", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py new file mode 100644 index 000000000000..376e84f2887e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py @@ -0,0 +1,741 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + count: int = 30, + is_default: Optional[bool] = None, + names: Optional[List[str]] = None, + search_text: Optional[str] = None, + order_by: Optional[str] = None, + order_by_asc: bool = False, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if is_default is not None: + _params["isDefault"] = _SERIALIZER.query("is_default", is_default, "bool") + if names is not None: + _params["names"] = _SERIALIZER.query("names", names, "[str]", div=",") + if search_text is not None: + _params["searchText"] = _SERIALIZER.query("search_text", search_text, "str") + if order_by is not None: + _params["orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if order_by_asc is not None: + _params["orderByAsc"] = _SERIALIZER.query("order_by_asc", order_by_asc, "bool") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, 
subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + skip_validation: bool = False, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip_validation is not None: + _params["skipValidation"] = _SERIALIZER.query("skip_validation", skip_validation, "bool") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_secrets_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class DatastoresOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`datastores` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: int = 30, + is_default: Optional[bool] = None, + names: Optional[List[str]] = None, + search_text: Optional[str] = None, + order_by: Optional[str] = None, + order_by_asc: bool = False, + **kwargs: Any + ) -> Iterable["_models.Datastore"]: + """List datastores. + + List datastores. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Maximum number of results to return. Default value is 30. + :type count: int + :param is_default: Filter down to the workspace default datastore. Default value is None. + :type is_default: bool + :param names: Names of datastores to return. Default value is None. + :type names: list[str] + :param search_text: Text to search for in the datastore names. Default value is None. + :type search_text: str + :param order_by: Order by property (createdtime | modifiedtime | name). Default value is None. + :type order_by: str + :param order_by_asc: Order by property in ascending order. Default value is False. 
+ :type order_by_asc: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Datastore or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Datastore] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DatastoreResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + count=count, + is_default=is_default, + names=names, + search_text=search_text, + order_by=order_by, + order_by_asc=order_by_asc, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DatastoreResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete datastore. + + Delete datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: Datastore name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.Datastore: + """Get datastore. + + Get datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Datastore] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Datastore", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Datastore, + skip_validation: bool = False, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Datastore: + """Create or update datastore. + + Create or update datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. + :type name: str + :param body: Datastore entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Datastore + :param skip_validation: Flag to skip validation. Default value is False. + :type skip_validation: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Datastore or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Datastore
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    def create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        body: IO,
+        skip_validation: bool = False,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models.Datastore:
+        """Create or update datastore.
+
+        Create or update datastore.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param name: Datastore name. Required.
+        :type name: str
+        :param body: Datastore entity to create or update. Required.
+        :type body: IO
+        :param skip_validation: Flag to skip validation. Default value is False.
+        :type skip_validation: bool
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Datastore or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Datastore
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        body: Union[_models.Datastore, IO],
+        skip_validation: bool = False,
+        **kwargs: Any
+    ) -> _models.Datastore:
+        """Create or update datastore.
+
+        Create or update datastore.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param name: Datastore name. Required.
+        :type name: str
+        :param body: Datastore entity to create or update. Is either a model type or an IO type.
+         Required.
+        :type body: ~azure.mgmt.machinelearningservices.models.Datastore or IO
+        :param skip_validation: Flag to skip validation. Default value is False.
+        :type skip_validation: bool
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Datastore] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Datastore") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + skip_validation=skip_validation, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("Datastore", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("Datastore", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + @distributed_trace + def list_secrets( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.DatastoreSecrets: + """Get datastore secrets. + + Get datastore secrets. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatastoreSecrets or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DatastoreSecrets + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.DatastoreSecrets] + + request = build_list_secrets_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_secrets.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DatastoreSecrets", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_secrets.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py new file mode 100644 index 000000000000..28da1895db45 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py @@ -0,0 +1,592 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", 
subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + 
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class EnvironmentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`environment_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.EnvironmentContainer"]: + """List environment containers. + + List environment containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentContainer or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.EnvironmentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.EnvironmentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. 
+
+        Create or update container.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param name: Container name. This is case-sensitive. Required.
+        :type name: str
+        :param body: Container entity to create or update. Required.
+        :type body: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: EnvironmentContainer or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        body: Union[_models.EnvironmentContainer, IO],
+        **kwargs: Any
+    ) -> _models.EnvironmentContainer:
+        """Create or update container.
+
+        Create or update container.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param name: Container name. This is case-sensitive. Required.
+        :type name: str
+        :param body: Container entity to create or update. Is either a model type or an IO type.
+         Required.
+        :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py new file mode 100644 index 000000000000..60c35765d16c --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py @@ -0,0 +1,629 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # 
type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class EnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`environment_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.EnvironmentVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.EnvironmentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.EnvironmentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.EnvironmentVersion, IO], + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py new file mode 100644 index 000000000000..f819f4ac64da --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py @@ -0,0 +1,824 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
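Editorial aside (not part of the generated patch): a brief usage sketch for the
EnvironmentVersionsOperations added above. It assumes the generated client
AzureMachineLearningServices exposes that operation group as an
`environment_versions` attribute (by analogy with the `jobs` attribute
documented below) and that azure-identity is installed; values in angle
brackets are placeholders.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# list() returns an ItemPaged iterator that transparently follows nextLink.
for version in client.environment_versions.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<environment-name>",
):
    print(version.name)

# Fetch a single version, then delete it; delete() returns None on success.
env = client.environment_versions.get(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<environment-name>",
    version="1",
)
client.environment_versions.delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<environment-name>",
    version="1",
)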
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + job_type: Optional[str] = None, + tag: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if job_type is not None: + _params["jobType"] = _SERIALIZER.query("job_type", job_type, "str") + if tag is not None: + _params["tag"] = _SERIALIZER.query("tag", tag, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if scheduled is not None: + _params["scheduled"] = _SERIALIZER.query("scheduled", scheduled, "bool") + if schedule_id is not None: + _params["scheduleId"] = _SERIALIZER.query("schedule_id", schedule_id, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, 
workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}", + ) # pylint: disable=line-too-long 
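    # Note (editorial): unlike the get/delete/cancel request builders in this
    # module, create_or_update validates the job `id` client-side via the
    # pattern below: it must start with a letter or digit, may then contain
    # only letters, digits, hyphens, and underscores, and is capped at 255
    # characters overall.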
+ path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_cancel_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class JobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`jobs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + job_type: Optional[str] = None, + tag: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.JobBase"]: + """Lists Jobs in the workspace. + + Lists Jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param job_type: Type of job to be returned. Default value is None. + :type job_type: str + :param tag: Jobs returned will have this tag key. Default value is None. + :type tag: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param scheduled: Indicator whether the job is scheduled job. Default value is None. + :type scheduled: bool + :param schedule_id: The scheduled id for listing the job triggered from. Default value is None. + :type schedule_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either JobBase or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.JobBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.JobBaseResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + job_type=job_type, + tag=tag, + list_view_type=list_view_type, + scheduled=scheduled, + schedule_id=schedule_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("JobBaseResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> LROPoller[None]: + """Deletes a Job (asynchronous). + + Deletes a Job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> _models.JobBase: + """Gets a Job by name/id. + + Gets a Job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.JobBase] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("JobBase", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.JobBase, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Creates and executes a Job. + + Creates and executes a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.JobBase + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Creates and executes a Job. + + Creates and executes a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, resource_group_name: str, workspace_name: str, id: str, body: Union[_models.JobBase, IO], **kwargs: Any + ) -> _models.JobBase: + """Creates and executes a Job. + + Creates and executes a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition object. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.JobBase or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.JobBase] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "JobBase") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("JobBase", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("JobBase", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + def _cancel_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_cancel_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._cancel_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _cancel_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore + + @distributed_trace + def begin_cancel(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> LROPoller[None]: + """Cancels a Job (asynchronous). + + Cancels a Job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._cancel_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_cancel.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py new file mode 100644 index 000000000000..b28f984e17d1 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py @@ -0,0 +1,1237 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
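Editorial aside (not part of the generated patch): a short usage sketch for the
JobsOperations added above. The `jobs` attribute name comes from the class
docstring; the client constructor arguments and the azure-identity import are
assumptions based on the usual generated mgmt-plane pattern, and values in
angle brackets are placeholders.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# List only non-archived jobs; list_view_type takes the ListViewType values
# documented above ("ActiveOnly", "ArchivedOnly", "All").
for job in client.jobs.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    list_view_type="ActiveOnly",
):
    print(job.name)

# begin_cancel() and begin_delete() return an LROPoller; result() blocks until
# the long-running operation completes (both yield None on success).
poller = client.jobs.begin_cancel(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    id="<job-name>",
)
poller.result()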
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + count: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}", + ) # pylint: disable=line-too-long + path_format_arguments = { + 
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + workspace_name: str, + id: str, + subscription_id: str, + *, + include_job_instructions: bool = False, + include_label_categories: bool = False, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if include_job_instructions is not None: + _params["includeJobInstructions"] = _SERIALIZER.query( + "include_job_instructions", include_job_instructions, "bool" + ) + if include_label_categories is not None: + _params["includeLabelCategories"] = _SERIALIZER.query( + "include_label_categories", include_label_categories, "bool" + ) + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, 
"str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_export_labels_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_pause_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_resume_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class LabelingJobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`labeling_jobs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = None, + **kwargs: Any + ) -> Iterable["_models.LabelingJob"]: + """Lists labeling jobs in the workspace. + + Lists labeling jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Number of labeling jobs to return. Default value is None. 
+ :type count: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LabelingJob or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.LabelingJobResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + count=count, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("LabelingJobResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + """Delete a labeling job. + + Delete a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + id: str, + include_job_instructions: bool = False, + include_label_categories: bool = False, + **kwargs: Any + ) -> _models.LabelingJob: + """Gets a labeling job by name/id. + + Gets a labeling job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param include_job_instructions: Boolean value to indicate whether to include JobInstructions + in response. Default value is False. + :type include_job_instructions: bool + :param include_label_categories: Boolean value to indicate Whether to include LabelCategories + in response. Default value is False. 
+ :type include_label_categories: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LabelingJob or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJob + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.LabelingJob] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + include_job_instructions=include_job_instructions, + include_label_categories=include_label_categories, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> _models.LabelingJob: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.LabelingJob] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "LabelingJob") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) 
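+        # The converted request is resolved against the client's base URL and sent through
+        # the ARM pipeline; any status other than 200/201 is surfaced as an HttpResponseError
+        # with the deserialized ErrorResponse attached (see the checks below).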
+ request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). 
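+
+        A minimal usage sketch (illustrative only): ``ml_client`` is assumed to be an
+        already-constructed
+        :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices` client and
+        the resource names below are placeholders::
+
+            poller = ml_client.labeling_jobs.begin_create_or_update(
+                resource_group_name="my-resource-group",  # placeholder resource group
+                workspace_name="my-workspace",            # placeholder workspace name
+                id="my-labeling-job",                     # labeling job name/identifier
+                body=labeling_job,                        # a populated LabelingJob model (or JSON stream)
+            )
+            created_job = poller.result()                 # blocks until the LRO completes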
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.LabelingJob] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("LabelingJob", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + def _export_labels_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> Optional[_models.ExportSummary]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ExportSummary]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ExportSummary") + + request = build_export_labels_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._export_labels_initial.metadata["url"], + 
headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ExportSummary", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _export_labels_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + + @overload + def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). + + Export labels from a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). + + Export labels from a labeling job (asynchronous). 
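+
+        A minimal usage sketch (illustrative only): ``ml_client`` is assumed to be an
+        already-constructed
+        :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices` client,
+        ``export_summary`` a prepared
+        :class:`~azure.mgmt.machinelearningservices.models.ExportSummary` payload (or JSON
+        stream), and the resource names below are placeholders::
+
+            poller = ml_client.labeling_jobs.begin_export_labels(
+                resource_group_name="my-resource-group",  # placeholder resource group
+                workspace_name="my-workspace",            # placeholder workspace name
+                id="my-labeling-job",                     # labeling job name/identifier
+                body=export_summary,                      # export request payload
+            )
+            export_result = poller.result()               # ExportSummary for the exported labels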
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). + + Export labels from a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ExportSummary] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._export_labels_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ExportSummary", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_export_labels.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + + @distributed_trace + def pause( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + """Pause a labeling job. + + Pause a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_pause_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.pause.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + pause.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause"} # type: ignore + + def _resume_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_resume_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._resume_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + 
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _resume_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore + + @distributed_trace + def begin_resume(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> LROPoller[None]: + """Resume a labeling job (asynchronous). + + Resume a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._resume_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_resume.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py deleted file mode 100644 index 10261d10df59..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py +++ /dev/null @@ -1,912 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models as _models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class MachineLearningComputeOperations(object): - """MachineLearningComputeOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_workspace( - self, - resource_group_name, # type: str - workspace_name, # type: str - skiptoken=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.PaginatedComputeResourcesList"] - """Gets computes in specified workspace. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param skiptoken: Continuation token for pagination. 
- :type skiptoken: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedComputeResourcesList] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedComputeResourcesList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore - - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComputeResource" - """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are - not returned - use 'keys' nested resource to get them. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. 
- :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeResource, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def _create_or_update_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ComputeResource" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.ComputeResource" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'ComputeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if response.status_code == 201: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def begin_create_or_update( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ComputeResource" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ComputeResource"] - """Creates or updates compute. This call will overwrite a compute if it exists. This is a - nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify - that it does not exist yet. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :param parameters: Payload with Machine Learning compute definition. - :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._create_or_update_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - parameters=parameters, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def _update_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ClusterUpdateParameters" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.ComputeResource" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'ClusterUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def begin_update( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ClusterUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ComputeResource"] - """Updates properties of a compute. This call will overwrite a compute if it exists. This is a - nonrecoverable operation. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param parameters: Additional parameters for cluster update. - :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._update_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - parameters=parameters, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def _delete_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - underlying_resource_action, # type: Union[str, "_models.UnderlyingResourceAction"] - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - - if cls: - return cls(pipeline_response, None, response_headers) - - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def begin_delete( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - underlying_resource_action, # type: Union[str, "_models.UnderlyingResourceAction"] - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Deletes specified Machine Learning compute. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the - underlying compute from workspace if 'Detach'. - :type underlying_resource_action: str or ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._delete_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - underlying_resource_action=underlying_resource_action, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def list_nodes( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.AmlComputeNodesInformation" - """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AmlComputeNodesInformation, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AmlComputeNodesInformation"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_nodes.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore - - def list_keys( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComputeSecrets" - """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeSecrets, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeSecrets"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeSecrets', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore - - def start( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Posts a start action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.start.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore - - def stop( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Posts a stop action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.stop.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore - - def restart( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Posts a restart action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.restart.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py new file mode 100644 index 000000000000..6da15712315c --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py @@ -0,0 +1,595 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + count: Optional[int] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}", + ) # pylint: 
disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`model_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ModelContainer"]: + """List model containers. + + List model containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Maximum number of results to return. Default value is None. + :type count: int + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + count=count, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.ModelContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ModelContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ModelContainer, IO], + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py new file mode 100644 index 000000000000..3d75ff73847a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py @@ -0,0 +1,672 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + offset: Optional[int] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + feed: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if version is not None: + _params["version"] = _SERIALIZER.query("version", version, "str") + if description is not None: + _params["description"] = _SERIALIZER.query("description", description, "str") + if offset is not None: + _params["offset"] = _SERIALIZER.query("offset", offset, "int") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if properties is not None: + _params["properties"] = _SERIALIZER.query("properties", properties, "str") + if feed is not None: + _params["feed"] = _SERIALIZER.query("feed", feed, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`model_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + offset: Optional[int] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + feed: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ModelVersion"]: + """List model versions. + + List model versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Model name. This is case-sensitive. Required. + :type name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param version: Model version. Default value is None. + :type version: str + :param description: Model description. Default value is None. + :type description: str + :param offset: Number of initial results to skip. Default value is None. + :type offset: int + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param properties: Comma-separated list of property names (and optionally values). Example: + prop1,prop2=value2. Default value is None. + :type properties: str + :param feed: Name of the feed. Default value is None. + :type feed: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelVersion or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + skip=skip, + order_by=order_by, + top=top, + version=version, + description=description, + offset=offset, + tags=tags, + properties=properties, + feed=feed, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.ModelVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
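A hedged sketch of the point get and delete calls documented around here; "client" is the AzureMachineLearningServices instance from the sketch near the list operation, and names are placeholders:

    model_version = client.model_versions.get(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="<model-name>",
        version="1",
    )
    print(model_version.id)

    # delete returns None; the service answers 200 or 204.
    client.model_versions.delete(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="<model-name>",
        version="1",
    )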
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ModelVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.ModelVersion, IO], + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
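A sketch of registering a new version through this create-or-update operation; "client" is the AzureMachineLearningServices instance from the earlier sketch, and ModelVersionProperties with description/tags fields is assumed to be the matching properties model in this package's models namespace:

    from azure.mgmt.machinelearningservices.models import ModelVersion, ModelVersionProperties

    created = client.model_versions.create_or_update(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="<model-name>",
        version="2",
        body=ModelVersion(
            properties=ModelVersionProperties(  # assumed properties model and field names
                description="registered from a pipeline run",
                tags={"stage": "dev"},
            )
        ),
    )
    print(created.id)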
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_notebooks_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_notebooks_operations.py deleted file mode 100644 index 35f06ab915b0..000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_notebooks_operations.py +++ /dev/null @@ -1,166 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models as _models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class NotebooksOperations(object): - """NotebooksOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def _prepare_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["_models.NotebookResourceInfo"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.NotebookResourceInfo"]] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._prepare_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore - - def begin_prepare( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.NotebookResourceInfo"] - """prepare. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.NotebookResourceInfo"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._prepare_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py new file mode 100644 index 000000000000..1ac0e2865900 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py @@ -0,0 +1,1463 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") 
+ + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "deploymentName": _SERIALIZER.url( + "deployment_name", deployment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "deploymentName": _SERIALIZER.url( + "deployment_name", deployment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_logs_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", 
{}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_skus_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class OnlineDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
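For orientation, a hedged sketch of reaching this operation group through the generated client and enumerating deployments under an endpoint; the constructor arguments and resource names are assumptions, not part of this patch:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.machinelearningservices import AzureMachineLearningServices

    client = AzureMachineLearningServices(
        credential=DefaultAzureCredential(), subscription_id="<subscription-id>"
    )
    for deployment in client.online_deployments.list(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        endpoint_name="<endpoint-name>",
    ):
        print(deployment.name)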
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`online_deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.OnlineDeployment"]: + """List Inference Endpoint Deployments. + + List Inference Endpoint Deployments. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Top of list. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OnlineDeployment or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeploymentTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("OnlineDeploymentTrackedResourceArmPaginatedResult", 
pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> LROPoller[None]: + 
"""Delete Inference Endpoint Deployment (asynchronous). + + Delete Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> _models.OnlineDeployment: + """Get Inference Deployment Deployment. + + Get Inference Deployment Deployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OnlineDeployment or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeployment] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> Optional[_models.OnlineDeployment]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.OnlineDeployment]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSku") + + 
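+        # Exactly one of _json / _content is populated above: an IO or bytes payload is forwarded
+        # verbatim as the request body, while a model instance is serialized to JSON first.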
request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Is either a model type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
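A sketch of a partial update through this poller-based operation; "client" is the AzureMachineLearningServices instance from the earlier sketch, and the tags field on PartialMinimalTrackedResourceWithSku is an assumption:

    from azure.mgmt.machinelearningservices.models import PartialMinimalTrackedResourceWithSku

    poller = client.online_deployments.begin_update(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        endpoint_name="<endpoint-name>",
        deployment_name="blue",
        body=PartialMinimalTrackedResourceWithSku(tags={"stage": "production"}),  # tags field assumed
    )
    updated = poller.result()  # blocks until the PATCH-driven LRO completes
    print(updated.name)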
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeployment] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.OnlineDeployment, IO], + **kwargs: Any + ) -> _models.OnlineDeployment: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeployment] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OnlineDeployment") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.OnlineDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.OnlineDeployment, IO], + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. 
+ :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Is either a model type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineDeployment] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OnlineDeployment", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @overload + def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.DeploymentLogsRequest, + *, + 
content_type: str = "application/json", + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.DeploymentLogsRequest, IO], + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Is either a model type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DeploymentLogs] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DeploymentLogsRequest") + + request = build_get_logs_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.get_logs.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DeploymentLogs", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_logs.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs"} # type: ignore + + @distributed_trace + def list_skus( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.SkuResource"]: + """List Inference Endpoint Deployment Skus. + + List Inference Endpoint Deployment Skus. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param count: Number of Skus to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.SkuResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.SkuResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_skus_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + api_version=api_version, + template_url=self.list_skus.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_skus.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py new file mode 100644 index 000000000000..ceda7558558c --- /dev/null +++ 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py @@ -0,0 +1,1542 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + name: Optional[str] = None, + count: Optional[int] = None, + compute_type: Optional[Union[str, _models.EndpointComputeType]] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if name is not None: + _params["name"] = _SERIALIZER.query("name", name, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if compute_type is not None: + _params["computeType"] = _SERIALIZER.query("compute_type", 
compute_type, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if properties is not None: + _params["properties"] = _SERIALIZER.query("properties", properties, "str") + if order_by is not None: + _params["orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + 
resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_keys_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) 
-> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_regenerate_keys_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_token_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + 
"template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class OnlineEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`online_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: Optional[str] = None, + count: Optional[int] = None, + compute_type: Optional[Union[str, _models.EndpointComputeType]] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any + ) -> Iterable["_models.OnlineEndpoint"]: + """List Online Endpoints. + + List Online Endpoints. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of the endpoint. Default value is None. + :type name: str + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. + :type count: int + :param compute_type: EndpointComputeType to be filtered by. Known values are: "Managed", + "Kubernetes", and "AzureMLCompute". Default value is None. + :type compute_type: str or ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. + :type properties: str + :param order_by: The option to order the response. 
Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. + :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OnlineEndpoint or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpointTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + name=name, + count=count, + compute_type=compute_type, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("OnlineEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: 
ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete Online Endpoint (asynchronous). + + Delete Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.OnlineEndpoint: + """Get Online Endpoint. + + Get Online Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OnlineEndpoint or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpoint] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> Optional[_models.OnlineEndpoint]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.OnlineEndpoint]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithIdentity") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Is either a model type or a IO + type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpoint] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.OnlineEndpoint, IO], + **kwargs: Any + ) -> _models.OnlineEndpoint: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpoint] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OnlineEndpoint") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + 
workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.OnlineEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.OnlineEndpoint, IO], + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Is either a model type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OnlineEndpoint] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def list_keys( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: + """List EndpointAuthKeys for an Endpoint using Key-based authentication. + + List EndpointAuthKeys for an Endpoint using Key-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EndpointAuthKeys] + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys"} # type: ignore + + def _regenerate_keys_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "RegenerateEndpointKeysRequest") + + request = build_regenerate_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._regenerate_keys_initial.metadata["url"], + headers=_headers, + 
params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _regenerate_keys_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + + @overload + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.RegenerateEndpointKeysRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> LROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._regenerate_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_regenerate_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + + @distributed_trace + def get_token( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthToken: + """Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication. + + Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthToken or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthToken + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EndpointAuthToken] + + request = build_get_token_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_token.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EndpointAuthToken", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_token.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py index ee7327b7cc20..d72f97be48aa 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,87 +6,122 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
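As a point of reference for the regenerated online endpoint operations above, the sketch below shows one plausible way a caller would drive begin_create_or_update, list_keys, begin_regenerate_keys, and get_token. It is not part of the generated patch: the AzureMachineLearningServices(credential, subscription_id) constructor call, the client.online_endpoints attribute name, the OnlineEndpointProperties/auth_mode and key_type fields, and all resource names are assumptions chosen for illustration.

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.machinelearningservices import AzureMachineLearningServices
    from azure.mgmt.machinelearningservices import models

    # Assumed client construction; the real constructor signature may differ.
    client = AzureMachineLearningServices(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
    )

    # begin_create_or_update returns LROPoller[OnlineEndpoint]; .result() waits for the LRO.
    endpoint = client.online_endpoints.begin_create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-ws",
        endpoint_name="my-endpoint",
        body=models.OnlineEndpoint(
            location="eastus",
            # OnlineEndpointProperties and auth_mode are assumed model fields.
            properties=models.OnlineEndpointProperties(auth_mode="Key"),
        ),
    ).result()

    # Key-authenticated endpoints expose their keys via list_keys ...
    keys = client.online_endpoints.list_keys("my-rg", "my-ws", "my-endpoint")

    # ... and rotate them with begin_regenerate_keys, which returns LROPoller[None].
    client.online_endpoints.begin_regenerate_keys(
        "my-rg",
        "my-ws",
        "my-endpoint",
        # The key_type value is an assumption used only for illustration.
        body=models.RegenerateEndpointKeysRequest(key_type="Primary"),
    ).wait()

    # AMLToken-authenticated endpoints use get_token instead of keys.
    token = client.online_endpoints.get_token("my-rg", "my-ws", "my-endpoint")

The begin_* methods poll with ARMPolling by default; passing polling=False returns a poller that does not wait, matching the docstrings in the hunks above.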
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/providers/Microsoft.MachineLearningServices/operations") -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") -class Operations(object): - """Operations operations. + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`operations` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def list( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.OperationListResult"] - """Lists all of the available Azure Machine Learning Workspaces REST API operations. + @distributed_trace + def list(self, **kwargs: Any) -> Iterable["_models.AmlOperation"]: + """Lists all of the available Azure Machine Learning Services REST API operations. :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OperationListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either AmlOperation or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlOperation] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.AmlOperationListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - request = self._client.get(url, query_parameters, header_parameters) + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResult', pipeline_response) + deserialized = self._deserialize("AmlOperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -94,17 +130,18 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/providers/Microsoft.MachineLearningServices/operations"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py index 33fa5932b552..adc47f8b888d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
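The reworked Operations.list above now builds its requests with build_list_request and pages through AmlOperationListResult, following the next link with the client's api-version. The short sketch below shows how a caller might iterate the result; it reuses the assumed client construction from the earlier sketch, and the op.name attribute is an assumption about the AmlOperation model rather than something stated in this patch.

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.machinelearningservices import AzureMachineLearningServices

    # Assumed construction, as in the earlier sketch.
    client = AzureMachineLearningServices(DefaultAzureCredential(), "<subscription-id>")

    # list() returns ItemPaged[AmlOperation]; iterating it transparently follows nextLink.
    for op in client.operations.list():
        # `name` is an assumed attribute of the AmlOperation model.
        print(op.name)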
@@ -5,300 +6,572 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`private_endpoint_connections` attribute. + """ - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + models = _models -class PrivateEndpointConnectionsOperations(object): - """PrivateEndpointConnectionsOperations operations. + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Iterable["_models.PrivateEndpointConnection"]: + """List all the private endpoint connections associated with the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnection or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionListResult] - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) - models = _models + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + return pipeline_response + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections"} # type: ignore + + @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.PrivateEndpointConnection" + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Gets the specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. + with the workspace. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) + :return: PrivateEndpointConnection or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + 
deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore - def put( + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + + @overload + def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - properties, # type: "_models.PrivateEndpointConnection" - **kwargs # type: Any - ): - # type: (...) -> "_models.PrivateEndpointConnection" + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + properties: _models.PrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Update the state of specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. + with the workspace. Required. :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. + :param properties: The private endpoint connection properties. Required. :type properties: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) + :return: PrivateEndpointConnection or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.put.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(properties, 'PrivateEndpointConnection') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + properties: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the workspace. - return deserialized - put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Required. + :type properties: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ - def _delete_initial( + @distributed_trace + def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + properties: Union[_models.PrivateEndpointConnection, IO], + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Update the state of specified private endpoint connection associated with the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. Required. + :type private_endpoint_connection_name: str + :param properties: The private endpoint connection properties. Is either a model type or a IO + type. Required. + :type properties: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop("error_map", {}) or {}) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(properties, (IO, bytes)): + _content = properties + else: + _json = self._serialize.body(properties, "PrivateEndpointConnection") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 202, 204]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + return deserialized - def begin_delete( - self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> None: """Deletes the specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. + with the workspace. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
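
With the combined overload in place, the old put surfaces as create_or_update (accepting either the model or a raw IO body), and, as the removed polling code just below shows, the long-running begin_delete collapses into a plain synchronous delete. A hedged sketch of approving and then removing a connection, assuming the model keeps the standard ARM private_link_service_connection_state (status/description) fields and reusing the same placeholder setup:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.machinelearningservices import AzureMachineLearningServices

    client = AzureMachineLearningServices(DefaultAzureCredential(), "<subscription-id>")

    # Fetch the existing connection, flip its state, and write it back.
    connection = client.private_endpoint_connections.get(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        private_endpoint_connection_name="my-connection",
    )
    connection.private_link_service_connection_state.status = "Approved"  # assumed field
    connection.private_link_service_connection_state.description = "Approved via SDK"

    client.private_endpoint_connections.create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        private_endpoint_connection_name="my-connection",
        properties=connection,
    )

    # No LROPoller anymore: delete returns None once the service responds 200/204.
    client.private_endpoint_connections.delete(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        private_endpoint_connection_name="my-connection",
    )
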
- :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] + :return: None or the result of cls(response) + :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._delete_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - private_endpoint_connection_name=private_endpoint_connection_name, - cls=lambda x,y,z: x, - **kwargs - ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) + response = pipeline_response.http_response - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} 
# type: ignore + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py index 89f73bf100c5..7fb90f76539e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,100 +6,144 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar + _url = _format_url_section(_url, **path_format_arguments) - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") -class PrivateLinkResourcesOperations(object): - """PrivateLinkResourcesOperations operations. + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + +class PrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`private_link_resources` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_workspace( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> "_models.PrivateLinkResourceListResult" + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.PrivateLinkResourceListResult: """Gets the private link resources that need to be created for a workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateLinkResourceListResult, or the result of cls(response) + :return: PrivateLinkResourceListResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateLinkResourceListResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourceListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateLinkResourceListResult] + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - 
request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response) + deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py index 2e50f583b164..14dc6b36ffd9 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,159 +6,284 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False -class QuotasOperations(object): - """QuotasOperations operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +def build_update_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas", + ) # pylint: disable=line-too-long + path_format_arguments = { + "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class QuotasOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`quotas` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @overload def update( self, - location, # type: str - parameters, # type: "_models.QuotaUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> "_models.UpdateWorkspaceQuotasResult" + location: str, + parameters: _models.QuotaUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: """Update quota for each VM family in workspace. - :param location: The location for update quota is queried. + :param location: The location for update quota is queried. Required. :type location: str - :param parameters: Quota update parameters. + :param parameters: Quota update parameters. Required. :type parameters: ~azure.mgmt.machinelearningservices.models.QuotaUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: UpdateWorkspaceQuotasResult, or the result of cls(response) + :return: UpdateWorkspaceQuotasResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, location: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. Required. + :type location: str + :param parameters: Quota update parameters. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, location: str, parameters: Union[_models.QuotaUpdateParameters, IO], **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. Required. + :type location: str + :param parameters: Quota update parameters. 
Is either a model type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.QuotaUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UpdateWorkspaceQuotasResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'QuotaUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.UpdateWorkspaceQuotasResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "QuotaUpdateParameters") + + request = build_update_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: 
map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response) + deserialized = self._deserialize("UpdateWorkspaceQuotasResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore - def list( - self, - location, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ListWorkspaceQuotas"] + update.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas"} # type: ignore + + @distributed_trace + def list(self, location: str, **kwargs: Any) -> Iterable["_models.ResourceQuota"]: """Gets the currently assigned Workspace Quotas based on VMFamily. - :param location: The location for which resource usage is queried. + :param location: The location for which resource usage is queried. Required. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListWorkspaceQuotas] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either ResourceQuota or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ResourceQuota] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceQuotas"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListWorkspaceQuotas] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = 
build_list_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response) + deserialized = self._deserialize("ListWorkspaceQuotas", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -166,16 +292,18 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'} # type: ignore + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registries_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registries_operations.py new file mode 100644 index 000000000000..585df56f539d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registries_operations.py @@ -0,0 +1,909 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
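
Before moving into the new registries module below, note that the two hunks above look the same from the caller's side: private_link_resources.list_by_workspace is renamed to list and still returns the whole PrivateLinkResourceListResult rather than a pager, while quotas.list stays an ItemPaged and now documents ResourceQuota items. A short sketch under the same placeholder assumptions as before:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.machinelearningservices import AzureMachineLearningServices

    client = AzureMachineLearningServices(DefaultAzureCredential(), "<subscription-id>")

    # Renamed operation: one call, full result model (iterate its .value list).
    link_resources = client.private_link_resources.list(
        resource_group_name="my-rg", workspace_name="my-workspace"
    )
    for resource in link_resources.value or []:
        print(resource.name)

    # Still a pager; each page is deserialized as ListWorkspaceQuotas internally.
    for quota in client.quotas.list(location="westus2"):
        print(quota)
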
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_by_subscription_request( + subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/registries" + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, 
"str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request(resource_group_name: str, registry_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistriesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registries` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_by_subscription(self, skip: Optional[str] = None, **kwargs: Any) -> Iterable["_models.Registry"]: + """List registries by subscription. + + List registries by subscription. 
+ + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Registry or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.RegistryTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("RegistryTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/registries"} # type: ignore + + @distributed_trace + def list(self, resource_group_name: str, skip: Optional[str] = None, **kwargs: Any) -> Iterable["_models.Registry"]: + """List registries. + + List registries. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Registry or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.RegistryTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("RegistryTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, **kwargs: Any + ) -> None: + """Delete registry. + + Delete registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. 
+ :type registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + + @distributed_trace + def get(self, resource_group_name: str, registry_name: str, **kwargs: Any) -> _models.Registry: + """Get registry. + + Get registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. 
+ :type registry_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Registry] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Registry", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + + @overload + def update( + self, + resource_group_name: str, + registry_name: str, + body: _models.PartialRegistryPartialTrackedResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. 
+ :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + registry_name: str, + body: Union[_models.PartialRegistryPartialTrackedResource, IO], + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Registry] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialRegistryPartialTrackedResource") + + request = build_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Registry", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return 
deserialized + + update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + + def _create_or_update_initial( + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> _models.Registry: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Registry] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Registry") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("Registry", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("Registry", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + body: _models.Registry, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Registry + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Registry or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Registry or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> LROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of registry. This is case-insensitive. Required. + :type registry_name: str + :param body: Details required to create the registry. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.Registry or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Registry or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Registry] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("Registry", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_containers_operations.py new file mode 100644 index 000000000000..a0cd67344b9e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_containers_operations.py @@ -0,0 +1,724 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
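+#
+# Usage sketch: the RegistryCodeContainersOperations group defined below is
+# reached through the `registry_code_containers` attribute of an
+# AzureMachineLearningServices client. The snippet assumes `ml_client` is an
+# already-configured client instance and that the resource group, registry,
+# and container names are placeholders:
+#
+#     for container in ml_client.registry_code_containers.list(
+#         resource_group_name="my-rg", registry_name="my-registry"
+#     ):
+#         print(container.name)
+#
+#     container = ml_client.registry_code_containers.get(
+#         resource_group_name="my-rg", registry_name="my-registry", code_name="my-code"
+#     )
+#
+#     ml_client.registry_code_containers.begin_delete(
+#         resource_group_name="my-rg", registry_name="my-registry", code_name="my-code"
+#     ).result()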
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, registry_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, code_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "codeName": _SERIALIZER.url("code_name", code_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, registry_name: str, code_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "codeName": _SERIALIZER.url("code_name", code_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, code_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", 
content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryCodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_code_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, registry_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.CodeContainer"]: + """List containers. + + List containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def 
get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. 
+ :type code_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + + @distributed_trace + def get(self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any) -> _models.CodeContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. 
+ :type code_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: Union[_models.CodeContainer, IO], + **kwargs: Any + ) -> _models.CodeContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + 
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("CodeContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: _models.CodeContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CodeContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CodeContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: Union[_models.CodeContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.CodeContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeContainer] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CodeContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_versions_operations.py new file mode 100644 index 000000000000..7168a126d943 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_versions_operations.py @@ -0,0 +1,776 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
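+#
+# Usage sketch: this module defines the RegistryCodeVersionsOperations group.
+# The snippet assumes `ml_client` is an already-configured
+# AzureMachineLearningServices client and that the group is exposed as its
+# `registry_code_versions` attribute (mirroring the other operation groups);
+# the resource names below are placeholders:
+#
+#     for version in ml_client.registry_code_versions.list(
+#         resource_group_name="my-rg",
+#         registry_name="my-registry",
+#         code_name="my-code",
+#         top=10,
+#     ):
+#         print(version.name)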
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + registry_name: str, + code_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "codeName": _SERIALIZER.url("code_name", code_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, code_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "codeName": _SERIALIZER.url("code_name", code_name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, registry_name: str, code_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "codeName": _SERIALIZER.url("code_name", code_name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, code_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryCodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_code_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.CodeVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeVersion or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> _models.CodeVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: Union[_models.CodeVersion, IO], + **kwargs: Any + ) -> _models.CodeVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("CodeVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: _models.CodeVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CodeVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CodeVersion or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CodeVersion]: + """Create or update version. + + Create or update version. 
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param registry_name: Name of Azure Machine Learning registry. Required.
+        :type registry_name: str
+        :param code_name: Container name. Required.
+        :type code_name: str
+        :param version: Version identifier. Required.
+        :type version: str
+        :param body: Version entity to create or update. Required.
+        :type body: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either CodeVersion or the result of
+         cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        registry_name: str,
+        code_name: str,
+        version: str,
+        body: Union[_models.CodeVersion, IO],
+        **kwargs: Any
+    ) -> LROPoller[_models.CodeVersion]:
+        """Create or update version.
+
+        Create or update version.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param registry_name: Name of Azure Machine Learning registry. Required.
+        :type registry_name: str
+        :param code_name: Container name. Required.
+        :type code_name: str
+        :param version: Version identifier. Required.
+        :type version: str
+        :param body: Version entity to create or update. Is either a model type or an IO type. Required.
+        :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+ :return: An instance of LROPoller that returns either CodeVersion or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.CodeVersion] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + code_name=code_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("CodeVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_containers_operations.py new file mode 100644 index 000000000000..c954cbd92cb8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_containers_operations.py @@ -0,0 +1,732 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, registry_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, component_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": 
_SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "componentName": _SERIALIZER.url("component_name", component_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, registry_name: str, component_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "componentName": _SERIALIZER.url("component_name", component_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, component_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "componentName": _SERIALIZER.url( + "component_name", component_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if 
content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_component_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, registry_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.ComponentContainer"]: + """List containers. + + List containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentContainer or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + 
if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> _models.ComponentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. 
+ :type component_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: Union[_models.ComponentContainer, IO], + **kwargs: Any + ) -> _models.ComponentContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComponentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: _models.ComponentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComponentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComponentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComponentContainer]: + """Create or update container. 
+
+        Create or update container.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param registry_name: Name of Azure Machine Learning registry. Required.
+        :type registry_name: str
+        :param component_name: Container name. Required.
+        :type component_name: str
+        :param body: Container entity to create or update. Required.
+        :type body: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either ComponentContainer or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        registry_name: str,
+        component_name: str,
+        body: Union[_models.ComponentContainer, IO],
+        **kwargs: Any
+    ) -> LROPoller[_models.ComponentContainer]:
+        """Create or update container.
+
+        Create or update container.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param registry_name: Name of Azure Machine Learning registry. Required.
+        :type registry_name: str
+        :param component_name: Container name. Required.
+        :type component_name: str
+        :param body: Container entity to create or update. Is either a model type or an IO type.
+         Required.
+        :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+ :return: An instance of LROPoller that returns either ComponentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentContainer] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ComponentContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_versions_operations.py new file mode 100644 index 000000000000..55525b1bca32 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_versions_operations.py @@ -0,0 +1,782 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + registry_name: str, + component_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "componentName": _SERIALIZER.url("component_name", component_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, component_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url 
= kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "componentName": _SERIALIZER.url("component_name", component_name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, registry_name: str, component_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "componentName": _SERIALIZER.url("component_name", component_name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, component_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "componentName": _SERIALIZER.url( + "component_name", component_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_component_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.ComponentVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentVersion or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + 
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> _models.ComponentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: Union[_models.ComponentVersion, IO], + **kwargs: Any + ) -> _models.ComponentVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + 
params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComponentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: _models.ComponentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: Union[_models.ComponentVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ComponentVersion] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + component_name=component_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ComponentVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_containers_operations.py new file mode 100644 index 000000000000..af6e94772431 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_containers_operations.py @@ -0,0 +1,750 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
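A minimal usage sketch for the RegistryComponentVersionsOperations class added above. It is not part of the generated patch: it assumes the standard generated management-client constructor (credential plus subscription ID), azure.identity.DefaultAzureCredential, and placeholder resource names; only the list, get and begin_delete operations defined above are exercised.

# Usage sketch (assumptions: client constructor signature, placeholder names).
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

# Page through component versions in a registry; $orderBy/$top/$skip are optional.
for component_version in client.registry_component_versions.list(
    resource_group_name="<resource-group>",
    registry_name="<registry-name>",
    component_name="<component-name>",
    top=10,
):
    print(component_version.name)

# Fetch one version, then delete it through the long-running-operation poller.
retrieved = client.registry_component_versions.get(
    resource_group_name="<resource-group>",
    registry_name="<registry-name>",
    component_name="<component-name>",
    version="1",
)
client.registry_component_versions.begin_delete(
    resource_group_name="<resource-group>",
    registry_name="<registry-name>",
    component_name="<component-name>",
    version="1",
).result()  # blocks until the delete LRO completes

Per the overloads above, begin_create_or_update accepts either a ComponentVersion model or an IO stream as the body.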
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + registry_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, environment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "environmentName": _SERIALIZER.url("environment_name", environment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, registry_name: str, environment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "environmentName": _SERIALIZER.url("environment_name", environment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, environment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": 
_SERIALIZER.url("registry_name", registry_name, "str"), + "environmentName": _SERIALIZER.url( + "environment_name", environment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryEnvironmentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_environment_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.EnvironmentContainer"]: + """List environment containers. + + List environment containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentContainer or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> _models.EnvironmentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. This is case-sensitive. Required. 
+ :type environment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: Union[_models.EnvironmentContainer, IO], + **kwargs: Any + ) -> _models.EnvironmentContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + 
request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: _models.EnvironmentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either EnvironmentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EnvironmentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: Union[_models.EnvironmentContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EnvironmentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentContainer] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_versions_operations.py new file mode 100644 index 000000000000..09c88000862b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_versions_operations.py @@ -0,0 +1,805 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
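A similar sketch, also not part of the patch, for the RegistryEnvironmentContainersOperations class above. Resource names are placeholders, the client construction repeats the assumption from the earlier sketch, and the listViewType value comes from the known values documented in the list docstring.

# Usage sketch (assumptions: client constructor signature, placeholder names).
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningServices

client = AzureMachineLearningServices(DefaultAzureCredential(), "<subscription-id>")

# List only non-archived environment containers in the registry.
for container in client.registry_environment_containers.list(
    resource_group_name="<resource-group>",
    registry_name="<registry-name>",
    list_view_type="ActiveOnly",
):
    print(container.name)

# Round-trip an existing container through the create-or-update LRO; the body may
# be an EnvironmentContainer model or an IO stream, per the overloads above.
existing = client.registry_environment_containers.get(
    resource_group_name="<resource-group>",
    registry_name="<registry-name>",
    environment_name="<environment-name>",
)
updated = client.registry_environment_containers.begin_create_or_update(
    resource_group_name="<resource-group>",
    registry_name="<registry-name>",
    environment_name="<environment-name>",
    body=existing,
).result()  # returns the deserialized EnvironmentContainer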
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + registry_name: str, + environment_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "environmentName": _SERIALIZER.url("environment_name", environment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "environmentName": _SERIALIZER.url("environment_name", environment_name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "environmentName": _SERIALIZER.url("environment_name", environment_name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "environmentName": _SERIALIZER.url( + "environment_name", environment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryEnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_environment_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.EnvironmentVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. This is case-sensitive. Required. + :type environment_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + subscription_id=self._config.subscription_id, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) 
+ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> _models.EnvironmentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. This is case-sensitive. Required. + :type environment_name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: Union[_models.EnvironmentVersion, IO], + **kwargs: Any + ) -> _models.EnvironmentVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: _models.EnvironmentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EnvironmentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either EnvironmentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EnvironmentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EnvironmentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: Union[_models.EnvironmentVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.EnvironmentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or an IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EnvironmentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.EnvironmentVersion] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + environment_name=environment_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_containers_operations.py new file mode 100644 index 000000000000..525ea5edfa58 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_containers_operations.py @@ -0,0 +1,746 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
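A minimal usage sketch for the RegistryEnvironmentVersionsOperations methods defined above (list, get, begin_delete, begin_create_or_update), reusing the client from the earlier sketch; the registry_environment_versions attribute name comes from the class docstring, and all resource names below are placeholders. The optional keywords shown map to the $top, $skip, and listViewType query parameters assembled by build_list_request.

    # `client` is the AzureMachineLearningServices instance from the earlier sketch.
    # list() returns an ItemPaged iterator; next-link paging is handled for you.
    for env_version in client.registry_environment_versions.list(
        resource_group_name="example-rg",
        registry_name="example-registry",
        environment_name="example-env",
        list_view_type="ActiveOnly",   # optional: "ActiveOnly", "ArchivedOnly", or "All"
        top=10,                        # optional page-size hint, sent as $top
    ):
        print(env_version.name)

    # get() returns a single EnvironmentVersion; begin_delete() returns LROPoller[None].
    version_one = client.registry_environment_versions.get(
        resource_group_name="example-rg",
        registry_name="example-registry",
        environment_name="example-env",
        version="1",
    )
    client.registry_environment_versions.begin_delete(
        resource_group_name="example-rg",
        registry_name="example-registry",
        environment_name="example-env",
        version="1",
    ).result()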
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + registry_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, model_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}", + ) 
# pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "modelName": _SERIALIZER.url("model_name", model_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, registry_name: str, model_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "modelName": _SERIALIZER.url("model_name", model_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, model_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct 
parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_model_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ModelContainer"]: + """List model containers. + + List model containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = 
kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> _models.ModelContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. 
+ :type model_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: Union[_models.ModelContainer, IO], + **kwargs: Any + ) -> _models.ModelContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: 
ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ModelContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: _models.ModelContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ModelContainer]: + """Create or update model container. + + Create or update model container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ModelContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ModelContainer]: + """Create or update model container. + + Create or update model container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ModelContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: Union[_models.ModelContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.ModelContainer]: + """Create or update model container. + + Create or update model container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param body: Container entity to create or update. Is either a model type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ModelContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelContainer] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ModelContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_versions_operations.py new file mode 100644 index 000000000000..53d812a6be1e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_versions_operations.py @@ -0,0 +1,814 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
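The registry model container operations above follow the standard azure-mgmt long-running-operation pattern: _create_or_update_initial issues the PUT and returns the raw response, and begin_create_or_update wraps it in an LROPoller driven by ARMPolling. A minimal usage sketch follows, assuming placeholder names ("my-rg", "my-registry", "my-model"), that the client exposes this operation group on a registry_model_containers attribute (mirroring the registry_model_versions attribute documented below), that azure-identity is available for credentials, and that the generated ModelContainer/ModelContainerProperties models accept the fields shown; it is an illustration, not part of this patch.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningServices
from azure.mgmt.machinelearningservices import models

# Placeholder identifiers -- substitute real values.
client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# Build the request body; ModelContainerProperties carries the asset metadata
# (field names assumed from the generated models package).
container = models.ModelContainer(
    properties=models.ModelContainerProperties(description="Example model container"),
)

# begin_create_or_update returns LROPoller[ModelContainer]; result() blocks
# until the service reports the operation as finished.
poller = client.registry_model_containers.begin_create_or_update(
    resource_group_name="my-rg",
    registry_name="my-registry",
    model_name="my-model",
    body=container,
)
model_container = poller.result()
print(model_container.name)

Passing polling=False substitutes NoPolling for ARMPolling, and the IO overload accepts an already-serialized JSON body in place of the model object.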
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + registry_name: str, + model_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "modelName": _SERIALIZER.url("model_name", model_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if order_by is not None: + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + if version is not None: + _params["version"] = _SERIALIZER.query("version", version, "str") + if description is not None: + _params["description"] = _SERIALIZER.query("description", description, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if properties is not None: + _params["properties"] = _SERIALIZER.query("properties", properties, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", 
list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "modelName": _SERIALIZER.url("model_name", model_name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "modelName": _SERIALIZER.url("model_name", model_name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url("registry_name", registry_name, "str"), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_model_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ModelVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param version: Version identifier. Default value is None. + :type version: str + :param description: Model description. Default value is None. 
+ :type description: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param properties: Comma-separated list of property names (and optionally values). Example: + prop1,prop2=value2. Default value is None. + :type properties: str + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelVersion or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + subscription_id=self._config.subscription_id, + skip=skip, + order_by=order_by, + top=top, + version=version, + description=description, + tags=tags, + properties=properties, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + + @distributed_trace + def get( + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> _models.ModelVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + + request = build_get_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.ModelVersion, IO], + **kwargs: Any + ) -> _models.ModelVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ModelVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: _models.ModelVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ModelVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ModelVersion]: + """Create or update version. 
+ + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.ModelVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.ModelVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.ModelVersion] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ModelVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py new file mode 100644 index 000000000000..abfa2d36c7b3 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py @@ -0,0 +1,721 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
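RegistryModelVersionsOperations applies the same surface one level down, to individual model versions: build_list_request maps the optional skip/order_by/top/version/description/tags/properties/list_view_type arguments onto the $skip, $orderBy, $top and related query parameters, while begin_delete and begin_create_or_update return LROPollers. A short paging-and-cleanup sketch under the same assumptions as the previous example (placeholder names; the ordering expression and version value are illustrative):

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningServices

# Placeholder identifiers -- substitute real values.
client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# list() returns an ItemPaged[ModelVersion]; iteration follows nextLink
# transparently, re-applying the client's api-version on each page.
for model_version in client.registry_model_versions.list(
    resource_group_name="my-rg",
    registry_name="my-registry",
    model_name="my-model",
    top=10,                        # sent as $top
    order_by="createdtime desc",   # sent as $orderBy (illustrative expression)
    list_view_type="ActiveOnly",   # sent as listViewType
):
    print(model_version.name)

# begin_delete returns LROPoller[None]; wait() blocks until the
# long-running delete completes.
client.registry_model_versions.begin_delete(
    resource_group_name="my-rg",
    registry_name="my-registry",
    model_name="my-model",
    version="1",
).wait()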
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ScheduleListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "name": _SERIALIZER.url("name", name, "str", 
pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class SchedulesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`schedules` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ScheduleListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.Schedule"]: + """List schedules in specified workspace. + + List schedules in specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: Status filter for schedule. Known values are: "EnabledOnly", + "DisabledOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Schedule or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ScheduleResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + list_view_type=list_view_type, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ScheduleResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = 
kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> LROPoller[None]: + """Delete schedule. + + Delete schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.Schedule: + """Get schedule. + + Get schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Schedule or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Schedule + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Schedule] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Schedule", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + def _create_or_update_initial( + self, resource_group_name: str, workspace_name: str, name: str, body: Union[_models.Schedule, IO], **kwargs: Any + ) -> _models.Schedule: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Schedule] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IO, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Schedule") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: 
disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("Schedule", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("Schedule", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Schedule, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :param body: Schedule definition. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Schedule + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Schedule or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. 
+ :type name: str + :param body: Schedule definition. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Schedule or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, name: str, body: Union[_models.Schedule, IO], **kwargs: Any + ) -> LROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :param body: Schedule definition. Is either a model type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Schedule or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either Schedule or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Schedule] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("Schedule", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py index a300468995d3..1fa0f400d92e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,96 +6,136 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
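The SchedulesOperations surface added above wraps long-running operations: begin_create_or_update and begin_delete return LROPoller objects, while get returns the Schedule model directly. A minimal usage sketch follows, assuming azure-identity is installed, that the generated client exposes these methods on a schedules attribute (mirroring the other operation groups in this patch), and that the subscription, resource group, workspace, and schedule body are caller-supplied placeholders:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningServices
from azure.mgmt.machinelearningservices import models as ml_models

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# The Schedule body shape is defined by model classes outside this excerpt; per the
# overloads above, either a Schedule model instance or a raw JSON/IO payload is accepted.
schedule_body = ml_models.Schedule(properties=None)  # placeholder properties

poller = client.schedules.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<schedule-name>",
    body=schedule_body,
)
schedule = poller.result()  # blocks until the service reports a terminal state

# begin_delete polls the same way; pass polling=False to get the poller without waiting.
client.schedules.begin_delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<schedule-name>",
).result()

Per the docstrings above, a saved continuation_token can also be passed to either begin_* method to resume polling from a previously saved state.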
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), + } -class UsagesOperations(object): - """UsagesOperations operations. + _url = _format_url_section(_url, **path_format_arguments) - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class UsagesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`usages` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - location, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ListUsagesResult"] + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, location: str, **kwargs: Any) -> Iterable["_models.Usage"]: """Gets the current usage information as well as limits for AML resources for given subscription and location. - :param location: The location for which resource usage is queried. + :param location: The location for which resource usage is queried. Required. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListUsagesResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListUsagesResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Usage or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Usage] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListUsagesResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListUsagesResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - 
url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('ListUsagesResult', pipeline_response) + deserialized = self._deserialize("ListUsagesResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -103,16 +144,18 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py index 44557ed77254..63dd3c128cf5 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,106 +6,133 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
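The rewritten UsagesOperations.list above now builds its initial request through build_list_request and returns an iterator of Usage models rather than raw ListUsagesResult pages; follow-up pages are fetched by re-issuing the nextLink with the client's api-version, as shown in the prepare_request branch. A short sketch of consuming the pager, with the caveat that the Usage field names are assumptions not spelled out in this excerpt:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# ItemPaged lazily follows the service's nextLink as the loop advances.
for usage in client.usages.list(location="<region>"):
    # unit, current_value, and limit are assumed Usage fields; adjust to the actual model.
    print(usage.unit, usage.current_value, usage.limit)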
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes", + ) # pylint: disable=line-too-long + path_format_arguments = { + "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -class VirtualMachineSizesOperations(object): - """VirtualMachineSizesOperations operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class VirtualMachineSizesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`virtual_machine_sizes` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - location, # type: str - compute_type=None, # type: Optional[str] - recommended=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> "_models.VirtualMachineSizeListResult" + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, location: str, **kwargs: Any) -> _models.VirtualMachineSizeListResult: """Returns supported VM Sizes in a location. - :param location: The location upon which virtual-machine-sizes is queried. + :param location: The location upon which virtual-machine-sizes is queried. Required. :type location: str - :param compute_type: Type of compute to filter by. - :type compute_type: str - :param recommended: Specifies whether to return recommended vm sizes or all vm sizes. - :type recommended: bool :keyword callable cls: A custom type or function that will be passed the direct response - :return: VirtualMachineSizeListResult, or the result of cls(response) + :return: VirtualMachineSizeListResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineSizeListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if compute_type is not None: - query_parameters['compute-type'] = self._serialize.query("compute_type", compute_type, 'str') - if recommended is not None: - query_parameters['recommended'] = self._serialize.query("recommended", recommended, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = 
kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachineSizeListResult] + + request = build_list_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) + deserialized = self._deserialize("VirtualMachineSizeListResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore + + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py index 71e2c3c11b36..280f8d3403a3 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,325 +6,576 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_create_request( + resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "connectionName": _SERIALIZER.url("connection_name", connection_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "connectionName": _SERIALIZER.url("connection_name", connection_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, 
workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "connectionName": _SERIALIZER.url("connection_name", connection_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + if target is not None: + _params["target"] = _SERIALIZER.query("target", target, "str") + if category is not None: + _params["category"] = _SERIALIZER.query("category", category, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class WorkspaceConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class WorkspaceConnectionsOperations(object): - """WorkspaceConnectionsOperations operations. 
- - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`workspace_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def list( + @overload + def create( self, - resource_group_name, # type: str - workspace_name, # type: str - target=None, # type: Optional[str] - category=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.PaginatedWorkspaceConnectionsList"] - """List all connections under a AML workspace. - - :param resource_group_name: Name of the resource group in which workspace is located. + resource_group_name: str, + workspace_name: str, + connection_name: str, + parameters: _models.WorkspaceConnectionPropertiesV2BasicResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """create. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param target: Target of the workspace connection. - :type target: str - :param category: Category of the workspace connection. - :type category: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param parameters: The object for creating or updating a new workspace connection. Required. + :type parameters: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedWorkspaceConnectionsList] - :raises: ~azure.core.exceptions.HttpResponseError + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedWorkspaceConnectionsList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if target is not None: - query_parameters['target'] = self._serialize.query("target", target, 'str') - if category is not None: - query_parameters['category'] = self._serialize.query("category", category, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore + @overload + def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """create. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param parameters: The object for creating or updating a new workspace connection. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + @distributed_trace def create( self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - parameters, # type: "_models.WorkspaceConnectionDto" - **kwargs # type: Any - ): - # type: (...) -> "_models.WorkspaceConnection" - """Add a new workspace connection. - - :param resource_group_name: Name of the resource group in which workspace is located. + resource_group_name: str, + workspace_name: str, + connection_name: str, + parameters: Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO], + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """create. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. - :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionDto + :param parameters: The object for creating or updating a new workspace connection. Is either a + model type or a IO type. Required. + :type parameters: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnection, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection - :raises: ~azure.core.exceptions.HttpResponseError + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "WorkspaceConnectionPropertiesV2BasicResource") + + request = build_create_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # 
type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + + @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.WorkspaceConnection" - """Get the detail of a workspace connection. - - :param resource_group_name: Name of the resource group in which workspace is located. + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """get. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. 
:type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnection, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection - :raises: ~azure.core.exceptions.HttpResponseError + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return 
deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore - def delete( - self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Delete a workspace connection. - - :param resource_group_name: Name of the resource group in which workspace is located. + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + ) -> None: + """Delete a workspace connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore 
- # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.WorkspaceConnectionPropertiesV2BasicResource"]: + """List all connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param target: Target of the workspace connection. Default value is None. + :type target: str + :param category: Category of the workspace connection. Default value is None. 
+ :type category: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceConnectionPropertiesV2BasicResource or + the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop( + "cls", None + ) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + target=target, + category=category, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize( + "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response + ) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py index ae71b091b5f7..b8ba160db0b6 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,99 +6,144 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } -class WorkspaceFeaturesOperations(object): - """WorkspaceFeaturesOperations operations. + _url = _format_url_section(_url, **path_format_arguments) - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. 
+ # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class WorkspaceFeaturesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`workspace_features` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ListAmlUserFeatureResult"] + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> Iterable["_models.AmlUserFeature"]: """Lists all enabled features for a workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListAmlUserFeatureResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either AmlUserFeature or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlUserFeature] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAmlUserFeatureResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListAmlUserFeatureResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response) + deserialized = self._deserialize("ListAmlUserFeatureResult", pipeline_response) list_of_elem = deserialized.value if cls: 
list_of_elem = cls(list_of_elem) @@ -106,17 +152,18 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py index c1aa91f9fc11..dcebc16e2b88 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,473 +6,1177 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", 
_params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_resource_group_request( + resource_group_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + } + + _url = 
_format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_diagnose_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_keys_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_resync_keys_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_subscription_request( + subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_notebook_access_token_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_prepare_notebook_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_storage_account_keys_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_notebook_keys_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class WorkspacesOperations(object): - """WorkspacesOperations operations. +def build_list_outbound_network_dependencies_endpoints_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-10-01-preview")) # type: str + accept = _headers.pop("Accept", "application/json") - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`workspaces` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.Workspace" + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: """Gets the properties of the specified machine learning workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) + :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Workspace - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", 
accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore def _create_or_update_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.Workspace" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.Workspace"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'Workspace') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) 
or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Workspace]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "Workspace") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response - if response.status_code not in [200, 201, 202]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: - deserialized = self._deserialize('Workspace', pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + @overload def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.Workspace" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.Workspace"] + resource_group_name: str, + workspace_name: str, + parameters: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: """Creates or updates a workspace with the specified parameters. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :param parameters: The parameters for creating or updating a machine learning workspace. + Required. :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of LROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameters for creating or updating a machine learning workspace. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param parameters: The parameters for creating or updating a machine learning workspace. Is + either a model type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = self._create_or_update_initial( + raw_result = self._create_or_update_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, parameters=parameters, - cls=lambda x,y,z: x, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('Workspace', pipeline_response) - + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + 
deserialization_callback=get_long_running_output, ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - def _delete_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: 
return cls(pipeline_response, None, {}) - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - def begin_delete( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: """Deletes a machine learning workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, - cls=lambda x,y,z: x, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) + kwargs.pop("error_map", None) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - def update( - self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.WorkspaceUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> "_models.Workspace" - """Updates a machine learning workspace with the specified parameters. + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - :param resource_group_name: Name of the resource group in which workspace is located. 
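# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the generated patch. The client class
# name and import path below are assumptions based on this package's layout;
# the credential, subscription id and resource names are placeholders, and the
# operation group is assumed to be exposed as `client.workspaces`.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningServices  # assumed client class
from azure.mgmt.machinelearningservices import models

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# begin_create_or_update returns an LROPoller[Workspace]; .result() blocks until
# the service finishes and yields the deserialized Workspace model. The body may
# be a models.Workspace instance or an already-serialized IO/bytes payload.
poller = client.workspaces.begin_create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    parameters=models.Workspace(location="eastus"),  # minimal body, for illustration only
)
workspace = poller.result()

# Deletion is also an LRO; the poller resolves to None once the workspace is gone.
client.workspaces.begin_delete("my-rg", "my-workspace").result()
# ---------------------------------------------------------------------------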
- :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param parameters: The parameters for updating a machine learning workspace. - :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.Workspace - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + parameters: Union[_models.WorkspaceUpdateParameters, IO], + **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Workspace]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "WorkspaceUpdateParameters") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore - def list_by_resource_group( + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: _models.WorkspaceUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameters for updating a machine learning workspace. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameters for updating a machine learning workspace. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( self, - resource_group_name, # type: str - skiptoken=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.WorkspaceListResult"] + resource_group_name: str, + workspace_name: str, + parameters: Union[_models.WorkspaceUpdateParameters, IO], + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameters for updating a machine learning workspace. Is either a model + type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("Workspace", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.Workspace"]: """Lists all the available machine learning workspaces under the specified resource group. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. Default value is None. 
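# Illustrative sketch, not part of the generated patch: begin_update drives the
# PATCH-based LRO shown above and accepts either a WorkspaceUpdateParameters
# model or an IO/bytes JSON body. `client` and `models` are the assumed client
# and models namespace from the earlier sketch; names are placeholders.
update_poller = client.workspaces.begin_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    parameters=models.WorkspaceUpdateParameters(tags={"env": "dev"}),
)
updated_workspace = update_poller.result()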
+ :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = 
deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -480,190 +1185,483 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data + return ItemPaged(get_next, extract_data) + + list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + + def _diagnose_initial( + self, + resource_group_name: str, + workspace_name: str, + parameters: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + **kwargs: Any + ) -> Optional[_models.DiagnoseResponseResult]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.DiagnoseResponseResult]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + if parameters is not None: + _json = self._serialize.body(parameters, "DiagnoseWorkspaceParameters") + else: + _json = None + + request = build_diagnose_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._diagnose_initial.metadata["url"], + headers=_headers, + params=_params, ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - def list_keys( + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = 
self._deserialize("DiagnoseResponseResult", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _diagnose_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + + @overload + def begin_diagnose( + self, + resource_group_name: str, + workspace_name: str, + parameters: Optional[_models.DiagnoseWorkspaceParameters] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameter of diagnosing workspace health. Default value is None. + :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DiagnoseResponseResult or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_diagnose( + self, + resource_group_name: str, + workspace_name: str, + parameters: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameter of diagnosing workspace health. Default value is None. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DiagnoseResponseResult or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_diagnose( self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ListWorkspaceKeysResult" + resource_group_name: str, + workspace_name: str, + parameters: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + **kwargs: Any + ) -> LROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param parameters: The parameter of diagnosing workspace health. Is either a model type or a IO + type. Default value is None. + :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either DiagnoseResponseResult or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.DiagnoseResponseResult] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._diagnose_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DiagnoseResponseResult", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_diagnose.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + + @distributed_trace + def list_keys( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListWorkspaceKeysResult: """Lists all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
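# Illustrative sketch, not part of the generated patch: begin_diagnose accepts an
# optional body (None is valid) and polls the 202 Location header, resolving to a
# DiagnoseResponseResult. `client` is the assumed client from the earlier sketch.
diagnose_poller = client.workspaces.begin_diagnose(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    parameters=None,  # or a models.DiagnoseWorkspaceParameters instance to scope the checks
)
diagnose_result = diagnose_poller.result()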
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListWorkspaceKeysResult, or the result of cls(response) + :return: ListWorkspaceKeysResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ListWorkspaceKeysResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceKeysResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.ListWorkspaceKeysResult] + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response) + deserialized = self._deserialize("ListWorkspaceKeysResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore - def resync_keys( - self, - resource_group_name, 
# type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Resync all the keys associated with this workspace. This includes keys for the storage account, - app insights and password for container registry. + list_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys"} # type: ignore - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] + def _resync_keys_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.resync_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_resync_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._resync_keys_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore + _resync_keys_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore - def list_by_subscription( - self, - skiptoken=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.WorkspaceListResult"] + @distributed_trace + def begin_resync_keys(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: + """Resync all the keys associated with this workspace. This includes keys for the storage account, + app insights and password for container registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._resync_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_resync_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore + + @distributed_trace + def list_by_subscription(self, skip: Optional[str] = None, **kwargs: Any) -> Iterable["_models.Workspace"]: """Lists all the available machine learning workspaces under the specified subscription. - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. Default value is None. 
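# Illustrative sketch, not part of the generated patch: list_keys is a direct
# POST returning ListWorkspaceKeysResult, while key resync is now exposed as a
# long-running begin_resync_keys call in this version of the client. `client`
# is the assumed client from the earlier sketch; names are placeholders.
keys = client.workspaces.list_keys("my-rg", "my-workspace")
client.workspaces.begin_resync_keys("my-rg", "my-workspace").result()  # resolves to None when done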
+ :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_subscription.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -672,17 +1670,391 @@ def extract_data(pipeline_response): def get_next(next_link=None): 
request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data + return ItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + + @distributed_trace + def list_notebook_access_token( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.NotebookAccessTokenResult: + """return notebook access token and refresh token. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NotebookAccessTokenResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[_models.NotebookAccessTokenResult] + + request = build_list_notebook_access_token_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_notebook_access_token.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("NotebookAccessTokenResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_notebook_access_token.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken"} # type: ignore + + def _prepare_notebook_initial( + 
+
+    def _prepare_notebook_initial(
+        self, resource_group_name: str, workspace_name: str, **kwargs: Any
+    ) -> Optional[_models.NotebookResourceInfo]:
+        error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
+        cls = kwargs.pop("cls", None)  # type: ClsType[Optional[_models.NotebookResourceInfo]]
+
+        request = build_prepare_notebook_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self._prepare_notebook_initial.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore
+
+        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize("NotebookResourceInfo", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _prepare_notebook_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"}  # type: ignore
+
+    @distributed_trace
+    def begin_prepare_notebook(
+        self, resource_group_name: str, workspace_name: str, **kwargs: Any
+    ) -> LROPoller[_models.NotebookResourceInfo]:
+        """Prepare a notebook.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.NotebookResourceInfo]
+        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._prepare_notebook_initial(  # type: ignore
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                api_version=api_version,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
+                **kwargs
+            )
+        kwargs.pop("error_map", None)
+
+        def get_long_running_output(pipeline_response):
+            deserialized = self._deserialize("NotebookResourceInfo", pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        if polling is True:
+            polling_method = cast(
+                PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
+            )  # type: PollingMethod
+        elif polling is False:
+            polling_method = cast(PollingMethod, NoPolling())
+        else:
+            polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output,
+            )
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_prepare_notebook.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"}  # type: ignore
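A minimal sketch of the long-running-operation pattern above, again reusing the assumed client from the first sketch; resource names are placeholders.

# Illustrative sketch only: begin_prepare_notebook starts the LRO and returns an
# LROPoller; result() blocks until completion (ARMPolling with
# final-state-via=location, as configured in the code above).
poller = client.workspaces.begin_prepare_notebook(
    resource_group_name="my-resource-group",  # placeholder
    workspace_name="my-workspace",            # placeholder
)
notebook_info = poller.result()  # NotebookResourceInfo on success

# The poller state can be saved and resumed later via the continuation_token
# keyword documented in the docstring above.
saved_token = poller.continuation_token()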
+
+    @distributed_trace
+    def list_storage_account_keys(
+        self, resource_group_name: str, workspace_name: str, **kwargs: Any
+    ) -> _models.ListStorageAccountKeysResult:
+        """List storage account keys of a workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ListStorageAccountKeysResult or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.ListStorageAccountKeysResult]
+
+        request = build_list_storage_account_keys_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self.list_storage_account_keys.metadata["url"],
+            headers=_headers,
+            params=_params,
         )
-    list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'}  # type: ignore
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore
+
+        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize("ListStorageAccountKeysResult", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    list_storage_account_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys"}  # type: ignore
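A sketch of the cls hook mentioned in every docstring above, assuming the same client object as before: the generated code calls cls(pipeline_response, deserialized, response_headers), so the hook can expose the raw HTTP response alongside the model.

# Illustrative sketch only: use the `cls` keyword to read the HTTP status code
# together with the deserialized ListStorageAccountKeysResult (treat it as a secret).
status_and_keys = client.workspaces.list_storage_account_keys(
    resource_group_name="my-resource-group",  # placeholder
    workspace_name="my-workspace",            # placeholder
    cls=lambda pipeline_response, deserialized, headers: (
        pipeline_response.http_response.status_code,
        deserialized,
    ),
)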
+
+    @distributed_trace
+    def list_notebook_keys(
+        self, resource_group_name: str, workspace_name: str, **kwargs: Any
+    ) -> _models.ListNotebookKeysResult:
+        """List keys of a notebook.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ListNotebookKeysResult or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.ListNotebookKeysResult]
+
+        request = build_list_notebook_keys_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self.list_notebook_keys.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore
+
+        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize("ListNotebookKeysResult", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    list_notebook_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys"}  # type: ignore
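A sketch of the error-handling contract shown above, with the same assumed client and placeholder names: non-200 responses are raised as HttpResponseError with the deserialized ErrorResponse attached as the error model.

# Illustrative sketch only: catch HttpResponseError raised by the generated
# error handling (map_error + failsafe_deserialize in the code above).
from azure.core.exceptions import HttpResponseError

try:
    notebook_keys = client.workspaces.list_notebook_keys(
        resource_group_name="my-resource-group",  # placeholder
        workspace_name="my-workspace",            # placeholder
    )
except HttpResponseError as exc:
    print(exc.status_code, exc.message)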
+
+    @distributed_trace
+    def list_outbound_network_dependencies_endpoints(
+        self, resource_group_name: str, workspace_name: str, **kwargs: Any
+    ) -> _models.ExternalFQDNResponse:
+        """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs)
+        programmatically.
+
+        Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs)
+        programmatically.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExternalFQDNResponse or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))  # type: str
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.ExternalFQDNResponse]
+
+        request = build_list_outbound_network_dependencies_endpoints_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore
+
+        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize("ExternalFQDNResponse", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    list_outbound_network_dependencies_endpoints.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"}  # type: ignore
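Finally, a sketch of the outbound-dependencies call above under the same assumptions (client object and placeholder names); no model attributes are shown since they are defined elsewhere in the patch.

# Illustrative sketch only: fetch the workspace's external outbound FQDN
# dependencies. The generated method also accepts an api_version keyword
# override, which is popped from kwargs in the code above.
fqdn_response = client.workspaces.list_outbound_network_dependencies_endpoints(
    resource_group_name="my-resource-group",  # placeholder
    workspace_name="my-workspace",            # placeholder
)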