diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json b/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json index f4f9290aab9b..586c4624d36f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json @@ -1,11 +1,11 @@ { - "commit": "c7daa3d35baaaabece0dbc6f731eadbe426973b9", + "commit": "a2f1652a44f86c27277b025615c6ba98bfc3440f", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest": "3.9.2", + "autorest": "3.9.7", "use": [ - "@autorest/python@6.4.12", - "@autorest/modelerfour@4.24.3" + "@autorest/python@6.7.1", + "@autorest/modelerfour@4.26.2" ], - "autorest_command": "autorest specification/machinelearningservices/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.4.12 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False", + "autorest_command": "autorest specification/machinelearningservices/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.7.1 --use=@autorest/modelerfour@4.26.2 --version=3.9.7 --version-tolerant=False", "readme": "specification/machinelearningservices/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py index a128695e54fd..8720abd11dd0 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py @@ -29,14 +29,14 @@ class MachineLearningServicesMgmtClientConfiguration(Configuration): # pylint: :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. Default value is "2023-04-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2023-08-01-preview". Note that overriding + this default value may result in unsupported behavior. 
:paramtype api_version: str """ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: super(MachineLearningServicesMgmtClientConfiguration, self).__init__(**kwargs) - api_version: str = kwargs.pop("api_version", "2023-04-01") + api_version: str = kwargs.pop("api_version", "2023-08-01-preview") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_machine_learning_services_mgmt_client.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_machine_learning_services_mgmt_client.py index 690ca6db66e4..202381dbcc5d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_machine_learning_services_mgmt_client.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_machine_learning_services_mgmt_client.py @@ -28,7 +28,18 @@ DatastoresOperations, EnvironmentContainersOperations, EnvironmentVersionsOperations, + FeaturesOperations, + FeaturesetContainersOperations, + FeaturesetVersionsOperations, + FeaturestoreEntityContainersOperations, + FeaturestoreEntityVersionsOperations, + InferenceEndpointsOperations, + InferenceGroupsOperations, + InferencePoolsOperations, JobsOperations, + LabelingJobsOperations, + ManagedNetworkProvisionsOperations, + ManagedNetworkSettingsRuleOperations, ModelContainersOperations, ModelVersionsOperations, OnlineDeploymentsOperations, @@ -49,6 +60,7 @@ RegistryModelContainersOperations, RegistryModelVersionsOperations, SchedulesOperations, + ServerlessEndpointsOperations, UsagesOperations, VirtualMachineSizesOperations, WorkspaceConnectionsOperations, @@ -64,10 +76,6 @@ class MachineLearningServicesMgmtClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """These APIs allow end users to operate on Azure Machine Learning Workspace resources. 
- :ivar operations: Operations operations - :vartype operations: azure.mgmt.machinelearningservices.operations.Operations - :ivar workspaces: WorkspacesOperations operations - :vartype workspaces: azure.mgmt.machinelearningservices.operations.WorkspacesOperations :ivar usages: UsagesOperations operations :vartype usages: azure.mgmt.machinelearningservices.operations.UsagesOperations :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations @@ -77,15 +85,6 @@ class MachineLearningServicesMgmtClient: # pylint: disable=client-accepts-api-v :vartype quotas: azure.mgmt.machinelearningservices.operations.QuotasOperations :ivar compute: ComputeOperations operations :vartype compute: azure.mgmt.machinelearningservices.operations.ComputeOperations - :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations - :vartype private_endpoint_connections: - azure.mgmt.machinelearningservices.operations.PrivateEndpointConnectionsOperations - :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: - azure.mgmt.machinelearningservices.operations.PrivateLinkResourcesOperations - :ivar workspace_connections: WorkspaceConnectionsOperations operations - :vartype workspace_connections: - azure.mgmt.machinelearningservices.operations.WorkspaceConnectionsOperations :ivar registry_code_containers: RegistryCodeContainersOperations operations :vartype registry_code_containers: azure.mgmt.machinelearningservices.operations.RegistryCodeContainersOperations @@ -146,8 +145,33 @@ class MachineLearningServicesMgmtClient: # pylint: disable=client-accepts-api-v :ivar environment_versions: EnvironmentVersionsOperations operations :vartype environment_versions: azure.mgmt.machinelearningservices.operations.EnvironmentVersionsOperations + :ivar featureset_containers: FeaturesetContainersOperations operations + :vartype featureset_containers: + azure.mgmt.machinelearningservices.operations.FeaturesetContainersOperations + :ivar features: FeaturesOperations operations + :vartype features: azure.mgmt.machinelearningservices.operations.FeaturesOperations + :ivar featureset_versions: FeaturesetVersionsOperations operations + :vartype featureset_versions: + azure.mgmt.machinelearningservices.operations.FeaturesetVersionsOperations + :ivar featurestore_entity_containers: FeaturestoreEntityContainersOperations operations + :vartype featurestore_entity_containers: + azure.mgmt.machinelearningservices.operations.FeaturestoreEntityContainersOperations + :ivar featurestore_entity_versions: FeaturestoreEntityVersionsOperations operations + :vartype featurestore_entity_versions: + azure.mgmt.machinelearningservices.operations.FeaturestoreEntityVersionsOperations + :ivar inference_pools: InferencePoolsOperations operations + :vartype inference_pools: + azure.mgmt.machinelearningservices.operations.InferencePoolsOperations + :ivar inference_groups: InferenceGroupsOperations operations + :vartype inference_groups: + azure.mgmt.machinelearningservices.operations.InferenceGroupsOperations + :ivar inference_endpoints: InferenceEndpointsOperations operations + :vartype inference_endpoints: + azure.mgmt.machinelearningservices.operations.InferenceEndpointsOperations :ivar jobs: JobsOperations operations :vartype jobs: azure.mgmt.machinelearningservices.operations.JobsOperations + :ivar labeling_jobs: LabelingJobsOperations operations + :vartype labeling_jobs: azure.mgmt.machinelearningservices.operations.LabelingJobsOperations :ivar model_containers: 
ModelContainersOperations operations :vartype model_containers: azure.mgmt.machinelearningservices.operations.ModelContainersOperations @@ -161,19 +185,41 @@ class MachineLearningServicesMgmtClient: # pylint: disable=client-accepts-api-v azure.mgmt.machinelearningservices.operations.OnlineDeploymentsOperations :ivar schedules: SchedulesOperations operations :vartype schedules: azure.mgmt.machinelearningservices.operations.SchedulesOperations + :ivar serverless_endpoints: ServerlessEndpointsOperations operations + :vartype serverless_endpoints: + azure.mgmt.machinelearningservices.operations.ServerlessEndpointsOperations :ivar registries: RegistriesOperations operations :vartype registries: azure.mgmt.machinelearningservices.operations.RegistriesOperations :ivar workspace_features: WorkspaceFeaturesOperations operations :vartype workspace_features: azure.mgmt.machinelearningservices.operations.WorkspaceFeaturesOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.machinelearningservices.operations.Operations + :ivar workspaces: WorkspacesOperations operations + :vartype workspaces: azure.mgmt.machinelearningservices.operations.WorkspacesOperations + :ivar workspace_connections: WorkspaceConnectionsOperations operations + :vartype workspace_connections: + azure.mgmt.machinelearningservices.operations.WorkspaceConnectionsOperations + :ivar managed_network_settings_rule: ManagedNetworkSettingsRuleOperations operations + :vartype managed_network_settings_rule: + azure.mgmt.machinelearningservices.operations.ManagedNetworkSettingsRuleOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: + azure.mgmt.machinelearningservices.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: + azure.mgmt.machinelearningservices.operations.PrivateLinkResourcesOperations + :ivar managed_network_provisions: ManagedNetworkProvisionsOperations operations + :vartype managed_network_provisions: + azure.mgmt.machinelearningservices.operations.ManagedNetworkProvisionsOperations :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. Default value is "2023-04-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2023-08-01-preview". Note that overriding + this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
@@ -195,23 +241,12 @@ def __init__( self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) - self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.virtual_machine_sizes = VirtualMachineSizesOperations( self._client, self._config, self._serialize, self._deserialize ) self.quotas = QuotasOperations(self._client, self._config, self._serialize, self._deserialize) self.compute = ComputeOperations(self._client, self._config, self._serialize, self._deserialize) - self.private_endpoint_connections = PrivateEndpointConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.workspace_connections = WorkspaceConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize - ) self.registry_code_containers = RegistryCodeContainersOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -263,7 +298,28 @@ def __init__( self.environment_versions = EnvironmentVersionsOperations( self._client, self._config, self._serialize, self._deserialize ) + self.featureset_containers = FeaturesetContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.features = FeaturesOperations(self._client, self._config, self._serialize, self._deserialize) + self.featureset_versions = FeaturesetVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featurestore_entity_containers = FeaturestoreEntityContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featurestore_entity_versions = FeaturestoreEntityVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.inference_pools = InferencePoolsOperations(self._client, self._config, self._serialize, self._deserialize) + self.inference_groups = InferenceGroupsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.inference_endpoints = InferenceEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.jobs = JobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.labeling_jobs = LabelingJobsOperations(self._client, self._config, self._serialize, self._deserialize) self.model_containers = ModelContainersOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -275,10 +331,30 @@ def __init__( self._client, self._config, self._serialize, self._deserialize ) self.schedules = SchedulesOperations(self._client, self._config, self._serialize, self._deserialize) + self.serverless_endpoints = ServerlessEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.registries = RegistriesOperations(self._client, self._config, self._serialize, self._deserialize) self.workspace_features = WorkspaceFeaturesOperations( self._client, self._config, self._serialize, self._deserialize ) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations(self._client, self._config, 
self._serialize, self._deserialize) + self.workspace_connections = WorkspaceConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.managed_network_settings_rule = ManagedNetworkSettingsRuleOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.managed_network_provisions = ManagedNetworkProvisionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_serialization.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_serialization.py index 842ae727fbbc..4bae2292227b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_serialization.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_serialization.py @@ -662,8 +662,9 @@ def _serialize(self, target_obj, data_type=None, **kwargs): _serialized.update(_new_attr) # type: ignore _new_attr = _new_attr[k] # type: ignore _serialized = _serialized[k] - except ValueError: - continue + except ValueError as err: + if isinstance(err, SerializationError): + raise except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) @@ -741,6 +742,8 @@ def query(self, name, data, data_type, **kwargs): :param data: The data to be serialized. :param str data_type: The type to be serialized from. + :keyword bool skip_quote: Whether to skip quote the serialized result. + Defaults to False. :rtype: str :raises: TypeError if serialization fails. :raises: ValueError if data is None @@ -749,10 +752,8 @@ def query(self, name, data, data_type, **kwargs): # Treat the list aside, since we don't want to encode the div separator if data_type.startswith("["): internal_data_type = data_type[1:-1] - data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] - if not kwargs.get("skip_quote", False): - data = [quote(str(d), safe="") for d in data] - return str(self.serialize_iter(data, internal_data_type, **kwargs)) + do_quote = not kwargs.get("skip_quote", False) + return str(self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)) # Not a list, regular serialization output = self.serialize_data(data, data_type, **kwargs) @@ -891,6 +892,8 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. + :keyword bool do_quote: Whether to quote the serialized result of each iterable element. + Defaults to False. 
:rtype: list, str """ if isinstance(data, str): @@ -903,9 +906,14 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): for d in data: try: serialized.append(self.serialize_data(d, iter_type, **kwargs)) - except ValueError: + except ValueError as err: + if isinstance(err, SerializationError): + raise serialized.append(None) + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + if div: serialized = ["" if s is None else str(s) for s in serialized] serialized = div.join(serialized) @@ -950,7 +958,9 @@ def serialize_dict(self, attr, dict_type, **kwargs): for key, value in attr.items(): try: serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) - except ValueError: + except ValueError as err: + if isinstance(err, SerializationError): + raise serialized[self.serialize_unicode(key)] = None if "xml" in serialization_ctxt: diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py index bd0df84f5319..0dafe0e287ff 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py @@ -5,8 +5,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import List, cast - from azure.core.pipeline.transport import HttpRequest @@ -16,15 +14,3 @@ def _convert_request(request, files=None): if files: request.set_formdata_body(files) return request - - -def _format_url_section(template, **kwargs): - components = template.split("/") - while components: - try: - return template.format(**kwargs) - except KeyError as key: - # Need the cast, as for some reasons "split" is typed as list[str | Any] - formatted_components = cast(List[str], template.split("/")) - components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] - template = "/".join(components) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py index 2eda20789583..e5754a47ce68 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "2.0.0b2" +VERSION = "1.0.0b1" diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py index f012a758393b..916f34cd0131 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py @@ -29,14 +29,14 @@ class MachineLearningServicesMgmtClientConfiguration(Configuration): # pylint: :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. Default value is "2023-04-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2023-08-01-preview". Note that overriding + this default value may result in unsupported behavior. :paramtype api_version: str """ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: super(MachineLearningServicesMgmtClientConfiguration, self).__init__(**kwargs) - api_version: str = kwargs.pop("api_version", "2023-04-01") + api_version: str = kwargs.pop("api_version", "2023-08-01-preview") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_machine_learning_services_mgmt_client.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_machine_learning_services_mgmt_client.py index 0240f80e8769..f07ffe4dc192 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_machine_learning_services_mgmt_client.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_machine_learning_services_mgmt_client.py @@ -28,7 +28,18 @@ DatastoresOperations, EnvironmentContainersOperations, EnvironmentVersionsOperations, + FeaturesOperations, + FeaturesetContainersOperations, + FeaturesetVersionsOperations, + FeaturestoreEntityContainersOperations, + FeaturestoreEntityVersionsOperations, + InferenceEndpointsOperations, + InferenceGroupsOperations, + InferencePoolsOperations, JobsOperations, + LabelingJobsOperations, + ManagedNetworkProvisionsOperations, + ManagedNetworkSettingsRuleOperations, ModelContainersOperations, ModelVersionsOperations, OnlineDeploymentsOperations, @@ -49,6 +60,7 @@ RegistryModelContainersOperations, RegistryModelVersionsOperations, SchedulesOperations, + ServerlessEndpointsOperations, UsagesOperations, VirtualMachineSizesOperations, WorkspaceConnectionsOperations, @@ -64,10 +76,6 @@ class MachineLearningServicesMgmtClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """These APIs allow end users to operate on Azure Machine Learning Workspace resources. 
- :ivar operations: Operations operations - :vartype operations: azure.mgmt.machinelearningservices.aio.operations.Operations - :ivar workspaces: WorkspacesOperations operations - :vartype workspaces: azure.mgmt.machinelearningservices.aio.operations.WorkspacesOperations :ivar usages: UsagesOperations operations :vartype usages: azure.mgmt.machinelearningservices.aio.operations.UsagesOperations :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations @@ -77,15 +85,6 @@ class MachineLearningServicesMgmtClient: # pylint: disable=client-accepts-api-v :vartype quotas: azure.mgmt.machinelearningservices.aio.operations.QuotasOperations :ivar compute: ComputeOperations operations :vartype compute: azure.mgmt.machinelearningservices.aio.operations.ComputeOperations - :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations - :vartype private_endpoint_connections: - azure.mgmt.machinelearningservices.aio.operations.PrivateEndpointConnectionsOperations - :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: - azure.mgmt.machinelearningservices.aio.operations.PrivateLinkResourcesOperations - :ivar workspace_connections: WorkspaceConnectionsOperations operations - :vartype workspace_connections: - azure.mgmt.machinelearningservices.aio.operations.WorkspaceConnectionsOperations :ivar registry_code_containers: RegistryCodeContainersOperations operations :vartype registry_code_containers: azure.mgmt.machinelearningservices.aio.operations.RegistryCodeContainersOperations @@ -148,8 +147,34 @@ class MachineLearningServicesMgmtClient: # pylint: disable=client-accepts-api-v :ivar environment_versions: EnvironmentVersionsOperations operations :vartype environment_versions: azure.mgmt.machinelearningservices.aio.operations.EnvironmentVersionsOperations + :ivar featureset_containers: FeaturesetContainersOperations operations + :vartype featureset_containers: + azure.mgmt.machinelearningservices.aio.operations.FeaturesetContainersOperations + :ivar features: FeaturesOperations operations + :vartype features: azure.mgmt.machinelearningservices.aio.operations.FeaturesOperations + :ivar featureset_versions: FeaturesetVersionsOperations operations + :vartype featureset_versions: + azure.mgmt.machinelearningservices.aio.operations.FeaturesetVersionsOperations + :ivar featurestore_entity_containers: FeaturestoreEntityContainersOperations operations + :vartype featurestore_entity_containers: + azure.mgmt.machinelearningservices.aio.operations.FeaturestoreEntityContainersOperations + :ivar featurestore_entity_versions: FeaturestoreEntityVersionsOperations operations + :vartype featurestore_entity_versions: + azure.mgmt.machinelearningservices.aio.operations.FeaturestoreEntityVersionsOperations + :ivar inference_pools: InferencePoolsOperations operations + :vartype inference_pools: + azure.mgmt.machinelearningservices.aio.operations.InferencePoolsOperations + :ivar inference_groups: InferenceGroupsOperations operations + :vartype inference_groups: + azure.mgmt.machinelearningservices.aio.operations.InferenceGroupsOperations + :ivar inference_endpoints: InferenceEndpointsOperations operations + :vartype inference_endpoints: + azure.mgmt.machinelearningservices.aio.operations.InferenceEndpointsOperations :ivar jobs: JobsOperations operations :vartype jobs: azure.mgmt.machinelearningservices.aio.operations.JobsOperations + :ivar labeling_jobs: LabelingJobsOperations operations + :vartype labeling_jobs: + 
azure.mgmt.machinelearningservices.aio.operations.LabelingJobsOperations :ivar model_containers: ModelContainersOperations operations :vartype model_containers: azure.mgmt.machinelearningservices.aio.operations.ModelContainersOperations @@ -164,19 +189,41 @@ class MachineLearningServicesMgmtClient: # pylint: disable=client-accepts-api-v azure.mgmt.machinelearningservices.aio.operations.OnlineDeploymentsOperations :ivar schedules: SchedulesOperations operations :vartype schedules: azure.mgmt.machinelearningservices.aio.operations.SchedulesOperations + :ivar serverless_endpoints: ServerlessEndpointsOperations operations + :vartype serverless_endpoints: + azure.mgmt.machinelearningservices.aio.operations.ServerlessEndpointsOperations :ivar registries: RegistriesOperations operations :vartype registries: azure.mgmt.machinelearningservices.aio.operations.RegistriesOperations :ivar workspace_features: WorkspaceFeaturesOperations operations :vartype workspace_features: azure.mgmt.machinelearningservices.aio.operations.WorkspaceFeaturesOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.machinelearningservices.aio.operations.Operations + :ivar workspaces: WorkspacesOperations operations + :vartype workspaces: azure.mgmt.machinelearningservices.aio.operations.WorkspacesOperations + :ivar workspace_connections: WorkspaceConnectionsOperations operations + :vartype workspace_connections: + azure.mgmt.machinelearningservices.aio.operations.WorkspaceConnectionsOperations + :ivar managed_network_settings_rule: ManagedNetworkSettingsRuleOperations operations + :vartype managed_network_settings_rule: + azure.mgmt.machinelearningservices.aio.operations.ManagedNetworkSettingsRuleOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: + azure.mgmt.machinelearningservices.aio.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: + azure.mgmt.machinelearningservices.aio.operations.PrivateLinkResourcesOperations + :ivar managed_network_provisions: ManagedNetworkProvisionsOperations operations + :vartype managed_network_provisions: + azure.mgmt.machinelearningservices.aio.operations.ManagedNetworkProvisionsOperations :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. Default value is "2023-04-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2023-08-01-preview". Note that overriding + this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
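# --------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated diff): the async client
# mirrors the synchronous surface, so the preview operation groups added above
# are reached the same way under azure.mgmt.machinelearningservices.aio. The
# subscription id and resource names below are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def main() -> None:
    credential = DefaultAzureCredential()
    async with MachineLearningServicesMgmtClient(
        credential=credential,
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    ) as client:
        # Async paging works the same for old and new operation groups.
        async for workspace in client.workspaces.list_by_resource_group("my-resource-group"):
            print(workspace.name)
        labeling = client.labeling_jobs  # LabelingJobsOperations, added in this diff
    await credential.close()


asyncio.run(main())
# --------------------------------------------------------------------------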
@@ -198,23 +245,12 @@ def __init__( self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) - self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.virtual_machine_sizes = VirtualMachineSizesOperations( self._client, self._config, self._serialize, self._deserialize ) self.quotas = QuotasOperations(self._client, self._config, self._serialize, self._deserialize) self.compute = ComputeOperations(self._client, self._config, self._serialize, self._deserialize) - self.private_endpoint_connections = PrivateEndpointConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.workspace_connections = WorkspaceConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize - ) self.registry_code_containers = RegistryCodeContainersOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -266,7 +302,28 @@ def __init__( self.environment_versions = EnvironmentVersionsOperations( self._client, self._config, self._serialize, self._deserialize ) + self.featureset_containers = FeaturesetContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.features = FeaturesOperations(self._client, self._config, self._serialize, self._deserialize) + self.featureset_versions = FeaturesetVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featurestore_entity_containers = FeaturestoreEntityContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featurestore_entity_versions = FeaturestoreEntityVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.inference_pools = InferencePoolsOperations(self._client, self._config, self._serialize, self._deserialize) + self.inference_groups = InferenceGroupsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.inference_endpoints = InferenceEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.jobs = JobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.labeling_jobs = LabelingJobsOperations(self._client, self._config, self._serialize, self._deserialize) self.model_containers = ModelContainersOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -278,10 +335,30 @@ def __init__( self._client, self._config, self._serialize, self._deserialize ) self.schedules = SchedulesOperations(self._client, self._config, self._serialize, self._deserialize) + self.serverless_endpoints = ServerlessEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.registries = RegistriesOperations(self._client, self._config, self._serialize, self._deserialize) self.workspace_features = WorkspaceFeaturesOperations( self._client, self._config, self._serialize, self._deserialize ) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations(self._client, self._config, 
self._serialize, self._deserialize) + self.workspace_connections = WorkspaceConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.managed_network_settings_rule = ManagedNetworkSettingsRuleOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.managed_network_provisions = ManagedNetworkProvisionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py index 4967e3af6930..95c8e68c6e97 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._operations import Operations -from ._workspaces_operations import WorkspacesOperations from ._usages_operations import UsagesOperations from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations from ._quotas_operations import QuotasOperations from ._compute_operations import ComputeOperations -from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations -from ._private_link_resources_operations import PrivateLinkResourcesOperations -from ._workspace_connections_operations import WorkspaceConnectionsOperations from ._registry_code_containers_operations import RegistryCodeContainersOperations from ._registry_code_versions_operations import RegistryCodeVersionsOperations from ._registry_component_containers_operations import RegistryComponentContainersOperations @@ -36,29 +31,41 @@ from ._datastores_operations import DatastoresOperations from ._environment_containers_operations import EnvironmentContainersOperations from ._environment_versions_operations import EnvironmentVersionsOperations +from ._featureset_containers_operations import FeaturesetContainersOperations +from ._features_operations import FeaturesOperations +from ._featureset_versions_operations import FeaturesetVersionsOperations +from ._featurestore_entity_containers_operations import FeaturestoreEntityContainersOperations +from ._featurestore_entity_versions_operations import FeaturestoreEntityVersionsOperations +from ._inference_pools_operations import InferencePoolsOperations +from ._inference_groups_operations import InferenceGroupsOperations +from ._inference_endpoints_operations import InferenceEndpointsOperations from ._jobs_operations import JobsOperations +from ._labeling_jobs_operations import LabelingJobsOperations from ._model_containers_operations import ModelContainersOperations from ._model_versions_operations import ModelVersionsOperations from ._online_endpoints_operations 
import OnlineEndpointsOperations from ._online_deployments_operations import OnlineDeploymentsOperations from ._schedules_operations import SchedulesOperations +from ._serverless_endpoints_operations import ServerlessEndpointsOperations from ._registries_operations import RegistriesOperations from ._workspace_features_operations import WorkspaceFeaturesOperations +from ._operations import Operations +from ._workspaces_operations import WorkspacesOperations +from ._workspace_connections_operations import WorkspaceConnectionsOperations +from ._managed_network_settings_rule_operations import ManagedNetworkSettingsRuleOperations +from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._managed_network_provisions_operations import ManagedNetworkProvisionsOperations from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk __all__ = [ - "Operations", - "WorkspacesOperations", "UsagesOperations", "VirtualMachineSizesOperations", "QuotasOperations", "ComputeOperations", - "PrivateEndpointConnectionsOperations", - "PrivateLinkResourcesOperations", - "WorkspaceConnectionsOperations", "RegistryCodeContainersOperations", "RegistryCodeVersionsOperations", "RegistryComponentContainersOperations", @@ -80,14 +87,31 @@ "DatastoresOperations", "EnvironmentContainersOperations", "EnvironmentVersionsOperations", + "FeaturesetContainersOperations", + "FeaturesOperations", + "FeaturesetVersionsOperations", + "FeaturestoreEntityContainersOperations", + "FeaturestoreEntityVersionsOperations", + "InferencePoolsOperations", + "InferenceGroupsOperations", + "InferenceEndpointsOperations", "JobsOperations", + "LabelingJobsOperations", "ModelContainersOperations", "ModelVersionsOperations", "OnlineEndpointsOperations", "OnlineDeploymentsOperations", "SchedulesOperations", + "ServerlessEndpointsOperations", "RegistriesOperations", "WorkspaceFeaturesOperations", + "Operations", + "WorkspacesOperations", + "WorkspaceConnectionsOperations", + "ManagedNetworkSettingsRuleOperations", + "PrivateEndpointConnectionsOperations", + "PrivateLinkResourcesOperations", + "ManagedNetworkProvisionsOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py index 5910c2b49220..e7549f81e16e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py @@ -69,6 +69,7 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> AsyncIterable["_models.ComponentVersion"]: """List component versions. @@ -91,6 +92,8 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: Component stage. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ComponentVersion or the result of cls(response) :rtype: @@ -123,6 +126,7 @@ def prepare_request(next_link=None): top=top, skip=skip, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py index fff3c389b06b..775cb8fb2b21 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -34,13 +34,17 @@ from ...operations._compute_operations import ( build_create_or_update_request, build_delete_request, + build_get_allowed_resize_sizes_request, build_get_request, build_list_keys_request, build_list_nodes_request, build_list_request, + build_resize_request, build_restart_request, build_start_request, build_stop_request, + build_update_custom_services_request, + build_update_idle_shutdown_setting_request, build_update_request, ) @@ -48,7 +52,7 @@ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ComputeOperations: +class ComputeOperations: # pylint: disable=too-many-public-methods """ .. warning:: **DO NOT** instantiate this class directly. @@ -866,6 +870,155 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" } + @overload + async def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: List[_models.CustomService], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Required. 
+ :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Required. + :type custom_services: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: Union[List[_models.CustomService], IO], + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Is either a [CustomService] type or a IO + type. Required. + :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(custom_services, (IOBase, bytes)): + _content = custom_services + else: + _json = self._serialize.body(custom_services, "[CustomService]") + + request = build_update_custom_services_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update_custom_services.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_custom_services.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices" + } + @distributed_trace def list_nodes( self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any @@ -1384,3 +1537,452 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- begin_restart.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart" } + + @overload + async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.IdleShutdownSetting, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. 
+ :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.IdleShutdownSetting, IO], + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Is either a IdleShutdownSetting type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "IdleShutdownSetting") + + request = build_update_idle_shutdown_setting_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update_idle_shutdown_setting.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_idle_shutdown_setting.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting" + } + + @distributed_trace_async + async def get_allowed_resize_sizes( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.VirtualMachineSizeListResult: + """Returns supported virtual machine sizes for resize. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: VirtualMachineSizeListResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.VirtualMachineSizeListResult] = kwargs.pop("cls", None) + + request = build_get_allowed_resize_sizes_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_allowed_resize_sizes.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("VirtualMachineSizeListResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_allowed_resize_sizes.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/getAllowedVmSizesForResize" + } + + async def _resize_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ResizeSchema, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ResizeSchema") + + request = build_resize_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._resize_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _resize_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize" + } + + @overload + async def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ResizeSchema, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. + Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ResizeSchema + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. 
+ :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ResizeSchema, IO], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. Is + either a ResizeSchema type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ResizeSchema or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
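The `begin_resize` overloads here accept either a `ResizeSchema` model or a raw JSON stream. A hedged sketch of the typical flow, first checking `get_allowed_resize_sizes` and then polling the resize LRO; the `ResizeSchema.target_vm_size` field and the VM size value are assumptions.

```python
# Hypothetical sketch: look up supported sizes, then resize a compute instance.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices.models import ResizeSchema


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            sizes = await client.compute.get_allowed_resize_sizes(
                "<resource-group>", "<workspace>", "<compute-instance>"
            )
            print("allowed sizes:", [size.name for size in sizes.value or []])

            poller = await client.compute.begin_resize(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                compute_name="<compute-instance>",
                parameters=ResizeSchema(target_vm_size="Standard_DS3_v2"),  # assumed field name
            )
            await poller.wait()  # the operation returns no body; just wait for completion


asyncio.run(main())
```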
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._resize_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_resize.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py index 875689bd263c..007f81e226ee 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py @@ -70,6 +70,7 @@ def list( skip: Optional[str] = None, tags: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> AsyncIterable["_models.DataVersionBase"]: """List data versions in the data container. @@ -99,6 +100,8 @@ def list( ListViewType.All]View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: data stage. Default value is None. 
+ :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataVersionBase or the result of cls(response) :rtype: @@ -132,6 +135,7 @@ def prepare_request(next_link=None): skip=skip, tags=tags, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py index 031930c06fc8..ddbf157d22bb 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py @@ -69,6 +69,7 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> AsyncIterable["_models.EnvironmentVersion"]: """List versions. @@ -91,6 +92,9 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: Stage for including/excluding (for example) archived entities. Takes priority + over listViewType. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) :rtype: @@ -123,6 +127,7 @@ def prepare_request(next_link=None): top=top, skip=skip, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_features_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_features_operations.py new file mode 100644 index 000000000000..713179b1c8e1 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_features_operations.py @@ -0,0 +1,269 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
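Both the data-version and environment-version `list` operations above gained an optional `stage` query filter. A minimal sketch of how it might be used; the container names and the "Production" stage value are placeholders.

```python
# Hypothetical sketch: list only versions tagged with a given feature-store stage.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            async for data_version in client.data_versions.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<data-asset-name>",
                stage="Production",
            ):
                print("data:", data_version.name)

            async for env_version in client.environment_versions.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<environment-name>",
                stage="Production",
            ):
                print("environment:", env_version.name)


asyncio.run(main())
```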
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._features_operations import build_get_request, build_list_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class FeaturesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`features` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + feature_name: Optional[str] = None, + description: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 1000, + **kwargs: Any + ) -> AsyncIterable["_models.Feature"]: + """List Features. + + List Features. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param featureset_name: Featureset name. This is case-sensitive. Required. + :type featureset_name: str + :param featureset_version: Featureset Version identifier. This is case-sensitive. Required. + :type featureset_version: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param feature_name: feature name. Default value is None. + :type feature_name: str + :param description: Description of the featureset. Default value is None. + :type description: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: Page size. 
Default value is 1000. + :type page_size: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Feature or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Feature] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeatureResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + featureset_name=featureset_name, + featureset_version=featureset_version, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + feature_name=feature_name, + description=description, + list_view_type=list_view_type, + page_size=page_size, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("FeatureResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features" + } + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + feature_name: str, + **kwargs: Any 
+ ) -> _models.Feature: + """Get feature. + + Get feature. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param featureset_name: Feature set name. This is case-sensitive. Required. + :type featureset_name: str + :param featureset_version: Feature set version identifier. This is case-sensitive. Required. + :type featureset_version: str + :param feature_name: Feature Name. This is case-sensitive. Required. + :type feature_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Feature or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Feature + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Feature] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + featureset_name=featureset_name, + featureset_version=featureset_version, + feature_name=feature_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Feature", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features/{featureName}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featureset_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featureset_containers_operations.py new file mode 100644 index 000000000000..da0306692f1d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featureset_containers_operations.py @@ -0,0 +1,649 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._featureset_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_entity_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class FeaturesetContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`featureset_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.FeaturesetContainer"]: + """List featurestore entity containers. + + List featurestore entity containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
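For the `FeaturesOperations` class introduced above, a hypothetical read-only sketch: enumerate the features of a featureset version, then fetch one by name. All resource names and the version "1" are placeholders.

```python
# Hypothetical sketch: browse features exposed by a featureset version.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            async for feature in client.features.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                featureset_name="<featureset>",
                featureset_version="1",
            ):
                print(feature.name)

            one = await client.features.get(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                featureset_name="<featureset>",
                featureset_version="1",
                feature_name="<feature-name>",
            )
            print(one.name)


asyncio.run(main())
```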
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: page size. Default value is 20. + :type page_size: int + :param name: name for the featureset. Default value is None. + :type name: str + :param description: description for the feature set. Default value is None. + :type description: str + :param created_by: createdBy user name. Default value is None. + :type created_by: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FeaturesetContainer or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + list_view_type=list_view_type, + page_size=page_size, + name=name, + description=description, + created_by=created_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("FeaturesetContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets" + } + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + @distributed_trace_async + async def get_entity( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.FeaturesetContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FeaturesetContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) + + request = build_get_entity_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_entity.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_entity.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturesetContainer, IO], + **kwargs: Any + ) -> _models.FeaturesetContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.FeaturesetContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetContainer]: + """Create or update container. 
+ + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturesetContainer, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Is either a FeaturesetContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featureset_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featureset_versions_operations.py new file mode 100644 index 000000000000..b2ebbb208325 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featureset_versions_operations.py @@ -0,0 +1,945 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
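Tying the featureset-container operations above together, a hedged sketch that creates (or updates) a container and reads it back with `get_entity`; the `FeaturesetContainerProperties` model and its `description` field are assumed from the preview models, and all resource names are placeholders.

```python
# Hypothetical sketch: upsert a featureset container, then fetch it with get_entity.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices.models import (  # model names assumed
    FeaturesetContainer,
    FeaturesetContainerProperties,
)


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            poller = await client.featureset_containers.begin_create_or_update(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<featureset-name>",
                body=FeaturesetContainer(
                    properties=FeaturesetContainerProperties(description="transactions featureset")
                ),
            )
            created = await poller.result()
            print("created:", created.name)

            fetched = await client.featureset_containers.get_entity(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<featureset-name>",
            )
            print("description:", fetched.properties.description)


asyncio.run(main())
```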
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._featureset_versions_operations import ( + build_backfill_request, + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class FeaturesetVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`featureset_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.FeaturesetVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Featureset name. This is case-sensitive. Required. + :type name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: page size. Default value is 20. + :type page_size: int + :param version_name: name for the featureset version. Default value is None. + :type version_name: str + :param version: featureset version. Default value is None. + :type version: str + :param description: description for the feature set version. Default value is None. + :type description: str + :param created_by: createdBy user name. Default value is None. + :type created_by: str + :param stage: Specifies the featurestore stage. Default value is None. + :type stage: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FeaturesetVersion or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + list_view_type=list_view_type, + page_size=page_size, + version_name=version_name, + version=version, + description=description, + created_by=created_by, + stage=stage, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("FeaturesetVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions" + } + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
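The version-level `list` shown above exposes the same kind of server-side filters (tags, stage, created_by, version_name). A small, hypothetical sketch with placeholder filter values.

```python
# Hypothetical sketch: list featureset versions promoted to a given stage.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            async for version in client.featureset_versions.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<featureset-name>",
                stage="Production",
                created_by="<user-name>",
            ):
                print(version.name)


asyncio.run(main())
```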
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.FeaturesetVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FeaturesetVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersion, IO], + **kwargs: Any + ) -> _models.FeaturesetVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturesetVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersion, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Is either a FeaturesetVersion type or an IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + async def _backfill_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersionBackfillRequest, IO], + **kwargs: Any + ) -> Optional[_models.FeaturesetVersionBackfillResponse]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.FeaturesetVersionBackfillResponse]] = kwargs.pop("cls", None) + + 
content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetVersionBackfillRequest") + + request = build_backfill_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._backfill_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturesetVersionBackfillResponse", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _backfill_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill" + } + + @overload + async def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturesetVersionBackfillRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetVersionBackfillResponse or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetVersionBackfillResponse or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersionBackfillRequest, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Is either a + FeaturesetVersionBackfillRequest type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetVersionBackfillResponse or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersionBackfillResponse] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._backfill_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturesetVersionBackfillResponse", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_backfill.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featurestore_entity_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featurestore_entity_containers_operations.py new file mode 100644 index 000000000000..1a68f75af25d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featurestore_entity_containers_operations.py @@ -0,0 
+1,650 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._featurestore_entity_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_entity_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class FeaturestoreEntityContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`featurestore_entity_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.FeaturestoreEntityContainer"]: + """List featurestore entity containers. + + List featurestore entity containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. 
+ :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All] View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: page size. Default value is 20. + :type page_size: int + :param name: name for the featurestore entity. Default value is None. + :type name: str + :param description: description for the featurestore entity. Default value is None. + :type description: str + :param created_by: createdBy user name. Default value is None. + :type created_by: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FeaturestoreEntityContainer or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + list_view_type=list_view_type, + page_size=page_size, + name=name, + description=description, + created_by=created_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("FeaturestoreEntityContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + 
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities" + } + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + @distributed_trace_async + async def get_entity( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.FeaturestoreEntityContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FeaturestoreEntityContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) + + request = build_get_entity_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_entity.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_entity.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturestoreEntityContainer, IO], + **kwargs: Any + ) -> _models.FeaturestoreEntityContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturestoreEntityContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.FeaturestoreEntityContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityContainer or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityContainer or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturestoreEntityContainer, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Is either a FeaturestoreEntityContainer type + or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityContainer or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featurestore_entity_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featurestore_entity_versions_operations.py new file mode 100644 index 000000000000..65885ab1ab71 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_featurestore_entity_versions_operations.py @@ -0,0 +1,681 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._featurestore_entity_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class FeaturestoreEntityVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`featurestore_entity_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.FeaturestoreEntityVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Feature entity name. This is case-sensitive. Required. + :type name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: page size. Default value is 20. + :type page_size: int + :param version_name: name for the featurestore entity version. Default value is None. + :type version_name: str + :param version: featurestore entity version. Default value is None. + :type version: str + :param description: description for the feature entity version. Default value is None. + :type description: str + :param created_by: createdBy user name. Default value is None. + :type created_by: str + :param stage: Specifies the featurestore stage. Default value is None. + :type stage: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FeaturestoreEntityVersion or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + list_view_type=list_view_type, + page_size=page_size, + version_name=version_name, + version=version, + description=description, + created_by=created_by, + stage=stage, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("FeaturestoreEntityVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + 
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions" + } + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.FeaturestoreEntityVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FeaturestoreEntityVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturestoreEntityVersion, IO], + **kwargs: Any + ) -> _models.FeaturestoreEntityVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturestoreEntityVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + 
headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturestoreEntityVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityVersion or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityVersion or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturestoreEntityVersion, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Is either a FeaturestoreEntityVersion type or + a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityVersion or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_inference_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_inference_endpoints_operations.py new file mode 100644 index 000000000000..c0223a41b3d8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_inference_endpoints_operations.py @@ -0,0 +1,865 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._inference_endpoints_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class InferenceEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`inference_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.InferenceEndpointMinimalTrackedResource"]: + """List Inference Endpoints. + + List Inference Endpoints. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: Name of the InferencePool. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param count: Number of InferenceEndpoint to be retrieved in a page of results. Default value + is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. 
Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. + :type properties: str + :param order_by: The option to order the response. Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. + :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either InferenceEndpointMinimalTrackedResource or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize( + "InferenceEndpointMinimalTrackedResourceArmPaginatedResult", pipeline_response + ) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints" + } + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, pool_name: str, group_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, pool_name: str, group_name: str, name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete InferenceEndpoint (asynchronous). + + Delete InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, pool_name: str, group_name: str, name: str, **kwargs: Any + ) -> _models.InferenceEndpointMinimalTrackedResource: + """Get InferenceEndpoint. + + Get InferenceEndpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: InferenceEndpointMinimalTrackedResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResource] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: Any, + **kwargs: Any + ) -> Optional[_models.InferenceEndpointMinimalTrackedResource]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[Optional[_models.InferenceEndpointMinimalTrackedResource]] = kwargs.pop("cls", None) + + _json = self._serialize.body(body, "object") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) 
+ request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: Any, + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceEndpointMinimalTrackedResource]: + """Update InferenceEndpoint (asynchronous). + + Update InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. + :type name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: any + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either + InferenceEndpointMinimalTrackedResource or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: Union[_models.InferenceEndpointMinimalTrackedResource, IO], + **kwargs: Any + ) -> _models.InferenceEndpointMinimalTrackedResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResource] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "InferenceEndpointMinimalTrackedResource") + + request = 
build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: _models.InferenceEndpointMinimalTrackedResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceEndpointMinimalTrackedResource]: + """Create or update InferenceEndpoint (asynchronous). + + Create or update InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. + :type name: str + :param body: InferenceEndpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + InferenceEndpointMinimalTrackedResource or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceEndpointMinimalTrackedResource]: + """Create or update InferenceEndpoint (asynchronous). + + Create or update InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. + :type name: str + :param body: InferenceEndpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + InferenceEndpointMinimalTrackedResource or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: Union[_models.InferenceEndpointMinimalTrackedResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceEndpointMinimalTrackedResource]: + """Create or update InferenceEndpoint (asynchronous). + + Create or update InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. 
+ :type group_name: str + :param name: InferenceEndpoint name. Required. + :type name: str + :param body: InferenceEndpoint entity to apply during operation. Is either a + InferenceEndpointMinimalTrackedResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + InferenceEndpointMinimalTrackedResource or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } 
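The new async InferenceEndpointsOperations class above is reached through the generated client's inference_endpoints attribute. A minimal usage sketch follows; it is not part of the generated diff, every resource name below is a placeholder, and the credential assumes the separate azure-identity package is installed:

# Hypothetical usage sketch (not generated code): subscription, resource group,
# workspace, pool, group, and endpoint names are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential  # assumed dependency (azure-identity)
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            # list() returns an AsyncItemPaged, so it is consumed with "async for" (no await).
            async for endpoint in client.inference_endpoints.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                pool_name="<pool>",
                group_name="<group>",
            ):
                print(endpoint.name)  # ARM resource name of each returned endpoint

            # begin_delete() returns an AsyncLROPoller; awaiting result() waits for the
            # long-running operation to complete (polling via AsyncARMPolling by default).
            poller = await client.inference_endpoints.begin_delete(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                pool_name="<pool>",
                group_name="<group>",
                name="<endpoint-name>",
            )
            await poller.result()


if __name__ == "__main__":
    asyncio.run(main())

The same pattern applies to begin_create_or_update and begin_update: both return an AsyncLROPoller whose result() coroutine yields the deserialized InferenceEndpointMinimalTrackedResource once the service reports completion.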
diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_inference_groups_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_inference_groups_operations.py new file mode 100644 index 000000000000..d8b87ad9dc8c --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_inference_groups_operations.py @@ -0,0 +1,1128 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._inference_groups_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_get_status_request, + build_list_request, + build_list_skus_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class InferenceGroupsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`inference_groups` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.InferenceGroupMinimalTrackedResourceWithSku"]: + """List Inference Groups. 
+ + List Inference Groups. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: Name of the InferencePool. Required. + :type pool_name: str + :param count: Number of InferenceGroup to be retrieved in a page of results. Default value is + None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. + :type properties: str + :param order_by: The option to order the response. Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. + :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either InferenceGroupMinimalTrackedResourceWithSku or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferenceGroupMinimalTrackedResourceWithSkuArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize( + "InferenceGroupMinimalTrackedResourceWithSkuArmPaginatedResult", pipeline_response + ) 
+ list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups" + } + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, pool_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, pool_name: str, name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete InferenceGroup (asynchronous). 
+ + Delete InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, pool_name: str, name: str, **kwargs: Any + ) -> _models.InferenceGroupMinimalTrackedResourceWithSku: + """Get InferenceGroup. + + Get InferenceGroup. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferenceGroupMinimalTrackedResourceWithSku] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> Optional[_models.InferenceGroupMinimalTrackedResourceWithSku]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.InferenceGroupMinimalTrackedResourceWithSku]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSku") + + request = build_update_request( + resource_group_name=resource_group_name, + 
workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Update InferenceGroup (asynchronous). + + Update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Update InferenceGroup (asynchronous). + + Update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Update InferenceGroup (asynchronous). + + Update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: Online Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSku type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferenceGroupMinimalTrackedResourceWithSku] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: Union[_models.InferenceGroupMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> _models.InferenceGroupMinimalTrackedResourceWithSku: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferenceGroupMinimalTrackedResourceWithSku] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "InferenceGroupMinimalTrackedResourceWithSku") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: _models.InferenceGroupMinimalTrackedResourceWithSku, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Create or update InferenceGroup (asynchronous). + + Create or update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: InferenceGroup entity to apply during operation. Required. 
+ :type body: + ~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Create or update InferenceGroup (asynchronous). + + Create or update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: InferenceGroup entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: Union[_models.InferenceGroupMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Create or update InferenceGroup (asynchronous). 
+ + Create or update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: InferenceGroup entity to apply during operation. Is either a + InferenceGroupMinimalTrackedResourceWithSku type or a IO type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferenceGroupMinimalTrackedResourceWithSku] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return 
AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @distributed_trace + def list_skus( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.SkuResource"]: + """List Inference Group Skus. + + List Inference Group Skus. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: Inference Pool name. Required. + :type pool_name: str + :param name: Inference Group name. Required. + :type name: str + :param count: Number of Skus to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.SkuResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SkuResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_skus_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + api_version=api_version, + template_url=self.list_skus.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = 
prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_skus.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}/skus" + } + + @distributed_trace_async + async def get_status( + self, resource_group_name: str, workspace_name: str, pool_name: str, name: str, **kwargs: Any + ) -> _models.GroupStatus: + """Retrieve inference group status. + + Retrieve inference group status. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GroupStatus or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.GroupStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.GroupStatus] = kwargs.pop("cls", None) + + request = build_get_status_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_status.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("GroupStatus", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_status.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}/status" + } diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_inference_pools_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_inference_pools_operations.py new file mode 100644 index 000000000000..1cac6677ab1f --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_inference_pools_operations.py @@ -0,0 +1,1083 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._inference_pools_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_get_status_request, + build_list_request, + build_list_skus_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class InferencePoolsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`inference_pools` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any + ) -> AsyncIterable["_models.InferencePool"]: + """List InferencePools. + + List InferencePools. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param count: Number of inferencePools to be retrieved in a page of results. Default value is + None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. + :type properties: str + :param order_by: The option to order the response. Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. + :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either InferencePool or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferencePoolTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("InferencePoolTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = 
prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools" + } + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete InferencePool (asynchronous). + + Delete InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.InferencePool: + """Get InferencePool. + + Get InferencePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: InferencePool or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.InferencePool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferencePool] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("InferencePool", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> Optional[_models.InferencePool]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.InferencePool]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSkuAndIdentity") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + 
_stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferencePool", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.PartialMinimalTrackedResourceWithSkuAndIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferencePool]: + """Update InferencePool (asynchronous). + + Update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: Inference Pool entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either InferencePool or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferencePool]: + """Update InferencePool (asynchronous). + + Update InferencePool (asynchronous). 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: Inference Pool entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either InferencePool or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.InferencePool]: + """Update InferencePool (asynchronous). + + Update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: Inference Pool entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSkuAndIdentity type or a IO type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either InferencePool or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferencePool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferencePool", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.InferencePool, IO], + **kwargs: Any + ) -> _models.InferencePool: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferencePool] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "InferencePool") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + 
headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferencePool", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("InferencePool", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.InferencePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferencePool]: + """Create or update InferencePool (asynchronous). + + Create or update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: InferencePool entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.InferencePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either InferencePool or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.InferencePool]: + """Create or update InferencePool (asynchronous). + + Create or update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: InferencePool entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either InferencePool or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.InferencePool, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.InferencePool]: + """Create or update InferencePool (asynchronous). + + Create or update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: InferencePool entity to apply during operation. Is either a InferencePool type or + a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.InferencePool or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either InferencePool or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferencePool] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferencePool", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @distributed_trace + def list_skus( + self, + resource_group_name: str, + workspace_name: str, + name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.SkuResource"]: + """List Inference Pool Skus. + + List Inference Pool Skus. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Inference Group name. Required. + :type name: str + :param count: Number of Skus to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.SkuResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SkuResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_skus_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + api_version=api_version, + template_url=self.list_skus.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_skus.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}/skus" + } + + @distributed_trace_async + async def get_status( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.PoolStatus: + """Retrieve inference pool status. + + Retrieve inference pool status. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PoolStatus or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PoolStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PoolStatus] = kwargs.pop("cls", None) + + request = build_get_status_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_status.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("PoolStatus", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_status.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}/status" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py index 8196b0804d30..316f54fb9024 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py @@ -37,6 +37,7 @@ build_delete_request, build_get_request, build_list_request, + build_update_request, ) T = TypeVar("T") @@ -71,6 +72,10 @@ def list( job_type: Optional[str] = None, tag: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + asset_name: Optional[str] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + properties: Optional[str] = None, **kwargs: Any ) -> AsyncIterable["_models.JobBase"]: """Lists Jobs in the workspace. @@ -91,6 +96,15 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param asset_name: Asset name the job's named output is registered with. Default value is None. + :type asset_name: str + :param scheduled: Indicator whether the job is scheduled job. Default value is None. + :type scheduled: bool + :param schedule_id: The scheduled id for listing the job triggered from. Default value is None. + :type schedule_id: str + :param properties: Comma-separated list of property names (and optionally values). Example: + prop1,prop2=value2. Default value is None. + :type properties: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobBase or the result of cls(response) :rtype: @@ -122,6 +136,10 @@ def prepare_request(next_link=None): job_type=job_type, tag=tag, list_view_type=list_view_type, + asset_name=asset_name, + scheduled=scheduled, + schedule_id=schedule_id, + properties=properties, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, @@ -375,6 +393,165 @@ async def get(self, resource_group_name: str, workspace_name: str, id: str, **kw "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" } + @overload + async def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.PartialJobBasePartialResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.PartialJobBasePartialResource, IO], + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Is either a + PartialJobBasePartialResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.JobBase] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialJobBasePartialResource") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("JobBase", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } + @overload async def create_or_update( self, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py new file mode 100644 index 000000000000..32bb9cf7a148 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py @@ -0,0 +1,1027 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._labeling_jobs_operations import ( + build_create_or_update_request, + build_delete_request, + build_export_labels_request, + build_get_request, + build_list_request, + build_pause_request, + build_resume_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class LabelingJobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`labeling_jobs` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + top: Optional[int] = None, + **kwargs: Any + ) -> AsyncIterable["_models.LabelingJob"]: + """Lists labeling jobs in the workspace. + + Lists labeling jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param top: Number of labeling jobs to return. Default value is None. + :type top: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LabelingJob or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + top=top, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("LabelingJobResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs" + } + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + """Delete a labeling job. + + Delete a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + + @distributed_trace_async + async def get(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> _models.LabelingJob: + """Gets a labeling job by name/id. + + Gets a labeling job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LabelingJob or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJob + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> _models.LabelingJob: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "LabelingJob") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) 
+ + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). 
+ + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Is either a LabelingJob type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("LabelingJob", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + + async def _export_labels_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> Optional[_models.ExportSummary]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ExportSummary]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ExportSummary") + + request = build_export_labels_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + 
template_url=self._export_labels_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ExportSummary", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _export_labels_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels" + } + + @overload + async def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Is either a ExportSummary type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ExportSummary] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._export_labels_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ExportSummary", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_export_labels.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels" + } + + @distributed_trace_async + async def pause( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> _models.LabelingJobProperties: + """Pause a labeling job. + + Pause a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LabelingJobProperties or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobProperties] = kwargs.pop("cls", None) + + request = build_pause_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.pause.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + pause.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause" + } + + async def _resume_initial( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> Optional[_models.LabelingJobProperties]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Optional[_models.LabelingJobProperties]] = kwargs.pop("cls", None) + + request = build_resume_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._resume_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _resume_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume" + } + + @distributed_trace_async + async def begin_resume( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJobProperties]: + """Resume a labeling job (asynchronous). + + Resume a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either LabelingJobProperties or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJobProperties] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobProperties] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._resume_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_resume.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_managed_network_provisions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_managed_network_provisions_operations.py new file mode 100644 index 000000000000..9a5995c4287b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_managed_network_provisions_operations.py @@ -0,0 +1,299 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
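The generated LabelingJobsOperations class above only exposes the raw long-running-operation plumbing; a minimal, hypothetical usage sketch of the new async surface could look like the following (the subscription, resource group, workspace, and job names are placeholders, and the LabelingJobProperties payload is left nearly empty, whereas a real job needs fully populated task and media settings):

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            # Create or update the labeling job and wait for the LRO to complete.
            poller = await client.labeling_jobs.begin_create_or_update(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                id="my-labeling-job",
                # Real jobs need fully populated properties (task type, media
                # settings, label categories); omitted here for brevity.
                body=models.LabelingJob(properties=models.LabelingJobProperties()),
            )
            job = await poller.result()
            print(job.id)

            # Pause and later resume the same job; begin_export_labels follows the
            # same poller pattern with an ExportSummary-derived body.
            await client.labeling_jobs.pause("my-rg", "my-workspace", "my-labeling-job")
            resume_poller = await client.labeling_jobs.begin_resume(
                "my-rg", "my-workspace", "my-labeling-job"
            )
            await resume_poller.result()


asyncio.run(main())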
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._managed_network_provisions_operations import build_provision_managed_network_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ManagedNetworkProvisionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`managed_network_provisions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + async def _provision_managed_network_initial( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[Union[_models.ManagedNetworkProvisionOptions, IO]] = None, + **kwargs: Any + ) -> Optional[_models.ManagedNetworkProvisionStatus]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ManagedNetworkProvisionStatus]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _json = self._serialize.body(body, "ManagedNetworkProvisionOptions") + else: + _json = None + + request = build_provision_managed_network_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._provision_managed_network_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ManagedNetworkProvisionStatus", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _provision_managed_network_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork" + } + + @overload + async def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[_models.ManagedNetworkProvisionOptions] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. + + Provisions the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Default + value is None. + :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. 
+ + Provisions the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Default + value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[Union[_models.ManagedNetworkProvisionOptions, IO]] = None, + **kwargs: Any + ) -> AsyncLROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. + + Provisions the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Is either a + ManagedNetworkProvisionOptions type or a IO type. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ManagedNetworkProvisionStatus] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._provision_managed_network_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ManagedNetworkProvisionStatus", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_provision_managed_network.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_managed_network_settings_rule_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_managed_network_settings_rule_operations.py new file mode 100644 index 000000000000..a8e926be6b60 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_managed_network_settings_rule_operations.py @@ -0,0 +1,606 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
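Because ManagedNetworkProvisionsOperations is a newly added operation group, a short hedged sketch of driving its provisioning poller may help; the client is assumed to be an already-constructed aio MachineLearningServicesMgmtClient and the resource names are placeholders:

from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def provision_managed_network(
    client: MachineLearningServicesMgmtClient,
) -> models.ManagedNetworkProvisionStatus:
    # The body is optional; a models.ManagedNetworkProvisionOptions instance may be
    # supplied here to customize how the managed network is provisioned.
    poller = await client.managed_network_provisions.begin_provision_managed_network(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
    )
    # result() yields the final ManagedNetworkProvisionStatus once the LRO completes.
    return await poller.result()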
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._managed_network_settings_rule_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ManagedNetworkSettingsRuleOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`managed_network_settings_rule` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterable["_models.OutboundRuleBasicResource"]: + """Lists the managed network outbound rules for a machine learning workspace. + + Lists the managed network outbound rules for a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OutboundRuleBasicResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutboundRuleListResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("OutboundRuleListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules" + } + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes an outbound rule from the managed network of a machine learning workspace. + + Deletes an outbound rule from the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> _models.OutboundRuleBasicResource: + """Gets an outbound rule from the managed network of a machine learning workspace. + + Gets an outbound rule from the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. 
+ :type rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OutboundRuleBasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutboundRuleBasicResource] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: Union[_models.OutboundRuleBasicResource, IO], + **kwargs: Any + ) -> Optional[_models.OutboundRuleBasicResource]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.OutboundRuleBasicResource]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OutboundRuleBasicResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + 
request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: _models.OutboundRuleBasicResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + + Creates or updates an outbound rule in the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OutboundRuleBasicResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + + Creates or updates an outbound rule in the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OutboundRuleBasicResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: Union[_models.OutboundRuleBasicResource, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + + Creates or updates an outbound rule in the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Is either a OutboundRuleBasicResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OutboundRuleBasicResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OutboundRuleBasicResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py index 8d601cec0072..f26a3b34c85e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py +++ 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -21,11 +21,13 @@ ) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request @@ -34,6 +36,7 @@ build_delete_request, build_get_request, build_list_request, + build_package_request, ) T = TypeVar("T") @@ -75,6 +78,7 @@ def list( properties: Optional[str] = None, feed: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> AsyncIterable["_models.ModelVersion"]: """List model versions. @@ -111,6 +115,8 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: Model stage. Default value is None. 
+ :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ModelVersion or the result of cls(response) :rtype: @@ -149,6 +155,7 @@ def prepare_request(next_link=None): properties=properties, feed=feed, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, @@ -518,3 +525,267 @@ async def create_or_update( create_or_update.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}" } + + async def _package_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> Optional[_models.PackageResponse]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.PackageResponse]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PackageRequest") + + request = build_package_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._package_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("PackageResponse", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _package_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package" + } + + @overload + async def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: 
str, + body: _models.PackageRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
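[Editor's note, not part of the generated patch] The workspace-scoped `begin_package` added here is also an LRO. The sketch below assumes a `models.PackageRequest` has already been built elsewhere (its constructor fields are not shown in this hunk), that the client exposes the operation group as `model_versions`, and uses placeholder resource names.

from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def package_model_version(
    client: MachineLearningServicesMgmtClient,
    package_request: models.PackageRequest,  # built by the caller; fields not shown in this patch
) -> models.PackageResponse:
    # Packages one specific model version; names below are placeholders.
    poller = await client.model_versions.begin_package(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="<model-name>",
        version="1",
        body=package_request,
    )
    # The initial call may return 202 with a Location header; the poller follows it
    # (final-state-via: location) until a PackageResponse is available.
    return await poller.result()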
+ :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Is either a PackageRequest type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PackageResponse] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._package_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("PackageResponse", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_package.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py index 3edc6c4bb6de..6404d73095fa 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py @@ -56,6 +56,8 @@ def __init__(self, *args, **kwargs) -> None: def list(self, **kwargs: Any) -> AsyncIterable["_models.AmlOperation"]: """Lists all of the available Azure Machine Learning Workspaces REST API operations. + Lists all of the available Azure Machine Learning Workspaces REST API operations. 
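[Editor's note, not part of the generated patch] Like the other paged operations touched in this diff, `list` returns an `AsyncItemPaged`, so results are consumed with `async for` rather than awaited directly. A minimal sketch, assuming an already-constructed async client and the standard `name` attribute on each operation:

from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def print_available_operations(client: MachineLearningServicesMgmtClient) -> None:
    # client.operations.list() returns an AsyncItemPaged; iterate it, do not await it.
    async for op in client.operations.list():
        print(op.name)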
+ :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AmlOperation or the result of cls(response) :rtype: diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py index 7a7f0546597b..1bc1dfbc04da 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py @@ -63,7 +63,9 @@ def __init__(self, *args, **kwargs) -> None: def list( self, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> AsyncIterable["_models.PrivateEndpointConnection"]: - """List all the private endpoint connections associated with the workspace. + """Called by end-users to get all PE connections. + + Called by end-users to get all PE connections. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -153,19 +155,86 @@ async def get_next(next_link=None): "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections" } + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> None: + """Called by end-users to delete a PE connection. + + Called by end-users to delete a PE connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + } + @distributed_trace_async async def get( self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any ) -> _models.PrivateEndpointConnection: - """Gets the specified private endpoint connection associated with the workspace. + """Called by end-users to get a PE connection. + + Called by end-users to get a PE connection. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpointConnection or the result of cls(response) @@ -228,23 +297,26 @@ async def create_or_update( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - properties: _models.PrivateEndpointConnection, + body: _models.PrivateEndpointConnection, *, content_type: str = "application/json", **kwargs: Any ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the workspace. + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. 
+ + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Required. - :type properties: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :param body: PrivateEndpointConnection object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -260,23 +332,26 @@ async def create_or_update( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - properties: IO, + body: IO, *, content_type: str = "application/json", **kwargs: Any ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the workspace. + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Required. - :type properties: IO + :param body: PrivateEndpointConnection object. Required. + :type body: IO :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -292,22 +367,25 @@ async def create_or_update( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - properties: Union[_models.PrivateEndpointConnection, IO], + body: Union[_models.PrivateEndpointConnection, IO], **kwargs: Any ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the workspace. + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. 
+ :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Is either a - PrivateEndpointConnection type or a IO type. Required. - :type properties: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection or IO + :param body: PrivateEndpointConnection object. Is either a PrivateEndpointConnection type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. :paramtype content_type: str @@ -334,10 +412,10 @@ async def create_or_update( content_type = content_type or "application/json" _json = None _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = self._serialize.body(properties, "PrivateEndpointConnection") + _json = self._serialize.body(body, "PrivateEndpointConnection") request = build_create_or_update_request( resource_group_name=resource_group_name, @@ -377,68 +455,3 @@ async def create_or_update( create_or_update.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" } - - @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any - ) -> None: - """Deletes the specified private endpoint connection associated with the workspace. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. 
- :type private_endpoint_connection_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - request = build_delete_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - private_endpoint_connection_name=private_endpoint_connection_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.delete.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" - } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py index 3e0bfcb8b50c..1f7704e61202 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py @@ -6,8 +6,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse +from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -19,7 +21,7 @@ from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -50,11 +52,25 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace_async - async def list( + @distributed_trace + def list( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.PrivateLinkResourceListResult: - """Gets the private link resources that need to be created for a workspace. + ) -> AsyncIterable["_models.PrivateLinkResource"]: + """Called by Client (Portal, CLI, etc) to get available "private link resources" for the + workspace. + Each "private link resource" is a connection endpoint (IP address) to the resource. + Pre single connection endpoint per workspace: the Data Plane IP address, returned by DNS + resolution. + Other RPs, such as Azure Storage, have multiple - one for Blobs, other for Queues, etc. + Defined in the "[NRP] Private Endpoint Design" doc, topic "GET API for GroupIds". + + Called by Client (Portal, CLI, etc) to get available "private link resources" for the + workspace. + Each "private link resource" is a connection endpoint (IP address) to the resource. + Pre single connection endpoint per workspace: the Data Plane IP address, returned by DNS + resolution. + Other RPs, such as Azure Storage, have multiple - one for Blobs, other for Queues, etc. + Defined in the "[NRP] Private Endpoint Design" doc, topic "GET API for GroupIds". :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -62,10 +78,17 @@ async def list( :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateLinkResourceListResult or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.PrivateLinkResourceListResult + :return: An iterator like instance of either PrivateLinkResource or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None) + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -74,42 +97,63 @@ async def list( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None) - - request = build_list_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response) + list_of_elem = 
deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) list.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources" diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_versions_operations.py index 07bb1a688545..432f1b8555a7 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_component_versions_operations.py @@ -70,6 +70,7 @@ def list( order_by: Optional[str] = None, top: Optional[int] = None, skip: Optional[str] = None, + stage: Optional[str] = None, **kwargs: Any ) -> AsyncIterable["_models.ComponentVersion"]: """List versions. @@ -90,6 +91,8 @@ def list( :type top: int :param skip: Continuation token for pagination. Default value is None. :type skip: str + :param stage: Component stage. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ComponentVersion or the result of cls(response) :rtype: @@ -121,6 +124,7 @@ def prepare_request(next_link=None): order_by=order_by, top=top, skip=skip, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_versions_operations.py index 10560e9f5fdb..9529caa41ef1 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_environment_versions_operations.py @@ -71,6 +71,7 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> AsyncIterable["_models.EnvironmentVersion"]: """List versions. @@ -94,6 +95,9 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. 
Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: Stage for including/excluding (for example) archived entities. Takes priority + over listViewType. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) :rtype: @@ -126,6 +130,7 @@ def prepare_request(next_link=None): top=top, skip=skip, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_versions_operations.py index 512c6ba1df76..30b748212e41 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_registry_model_versions_operations.py @@ -37,6 +37,7 @@ build_delete_request, build_get_request, build_list_request, + build_package_request, ) T = TypeVar("T") @@ -682,6 +683,273 @@ def get_long_running_output(pipeline_response): "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" } + async def _package_initial( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> Optional[_models.PackageResponse]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.PackageResponse]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PackageRequest") + + request = build_package_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._package_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("PackageResponse", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _package_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package" + } + + @overload + async def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: _models.PackageRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. 
+ :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Is either a PackageRequest type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
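[Editor's note, not part of the generated patch] The `continuation_token` and `polling` keywords documented above allow a registry-scoped package operation to be started in one place and resumed in another. A sketch under assumptions: the operation group is exposed as `registry_model_versions`, `package_request` is a `models.PackageRequest` built elsewhere, and resource names are placeholders.

from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def start_and_resume_package(
    client: MachineLearningServicesMgmtClient,
    package_request: models.PackageRequest,
) -> models.PackageResponse:
    # Kick off the LRO and capture a token that can be stored and reused later.
    poller = await client.registry_model_versions.begin_package(
        resource_group_name="<resource-group>",
        registry_name="<registry>",
        model_name="<model>",
        version="1",
        body=package_request,
    )
    token = poller.continuation_token()

    # Later (possibly in another process): rebuild a poller from the saved token;
    # the generated code short-circuits the initial call when a token is supplied.
    resumed = await client.registry_model_versions.begin_package(
        resource_group_name="<resource-group>",
        registry_name="<registry>",
        model_name="<model>",
        version="1",
        body=package_request,
        continuation_token=token,
    )
    return await resumed.result()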
+ :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PackageResponse] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._package_initial( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("PackageResponse", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_package.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package" + } + @overload async def create_or_get_start_pending_upload( self, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_serverless_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_serverless_endpoints_operations.py new file mode 100644 index 000000000000..810f3b19169c --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_serverless_endpoints_operations.py @@ -0,0 +1,1199 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._serverless_endpoints_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_keys_request, + build_list_request, + build_regenerate_keys_request, + build_update_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ServerlessEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.MachineLearningServicesMgmtClient`'s + :attr:`serverless_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.ServerlessEndpoint"]: + """List Serverless Endpoints. + + List Serverless Endpoints. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ServerlessEndpoint or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ServerlessEndpointTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ServerlessEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints" + } + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", 
{}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Serverless Endpoint (asynchronous). + + Delete Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
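+ .. admonition:: Example
+
+     A minimal usage sketch, not generated from the REST specification; ``client`` is assumed to
+     be an authenticated ``MachineLearningServicesMgmtClient`` and the resource names are
+     placeholders::
+
+         poller = await client.serverless_endpoints.begin_delete(
+             resource_group_name="my-rg", workspace_name="my-ws", name="my-endpoint"
+         )
+         await poller.result()  # completes once the serverless endpoint has been deleted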
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ServerlessEndpoint: + """Get Serverless Endpoint. + + Get Serverless Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ServerlessEndpoint or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> Optional[_models.ServerlessEndpoint]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ServerlessEndpoint]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSkuAndIdentity") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.PartialMinimalTrackedResourceWithSkuAndIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
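+ .. admonition:: Example
+
+     A minimal usage sketch, not part of the generated code; ``client`` is assumed to be an
+     authenticated ``MachineLearningServicesMgmtClient`` and ``tags`` is assumed to be a valid
+     field on the partial-resource model::
+
+         body = PartialMinimalTrackedResourceWithSkuAndIdentity(tags={"env": "dev"})
+         poller = await client.serverless_endpoints.begin_update(
+             resource_group_name="my-rg", workspace_name="my-ws", name="my-endpoint", body=body
+         )
+         endpoint = await poller.result()  # the updated ServerlessEndpoint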
+ :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSkuAndIdentity type or a IO type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ServerlessEndpoint, IO], + **kwargs: Any + ) -> _models.ServerlessEndpoint: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, 
"ServerlessEndpoint") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ServerlessEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). + + Create or update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). + + Create or update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ServerlessEndpoint, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). + + Create or update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Is either a + ServerlessEndpoint type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @distributed_trace_async + async def list_keys( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: + """List EndpointAuthKeys for an Endpoint using Key-based authentication. + + List EndpointAuthKeys for an Endpoint using Key-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/listKeys" + } + + async def _regenerate_keys_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> Optional[_models.EndpointAuthKeys]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.EndpointAuthKeys]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "RegenerateEndpointKeysRequest") + + request = build_regenerate_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._regenerate_keys_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _regenerate_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys" + } + + @overload + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.RegenerateEndpointKeysRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: RegenerateKeys request . Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
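+ .. admonition:: Example
+
+     A minimal usage sketch, not part of the generated code; ``client`` is assumed to be an
+     authenticated ``MachineLearningServicesMgmtClient`` and ``key_type="Primary"`` is assumed
+     to be an accepted value on the request model::
+
+         body = RegenerateEndpointKeysRequest(key_type="Primary")
+         poller = await client.serverless_endpoints.begin_regenerate_keys(
+             resource_group_name="my-rg", workspace_name="my-ws", name="my-endpoint", body=body
+         )
+         keys = await poller.result()  # the regenerated EndpointAuthKeys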
+ :return: An instance of AsyncLROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: RegenerateKeys request . Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: RegenerateKeys request . Is either a RegenerateEndpointKeysRequest type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._regenerate_keys_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_regenerate_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py index 1c796d31405b..c7044c756eaf 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py @@ -34,6 +34,8 @@ build_delete_request, build_get_request, build_list_request, + build_list_secrets_request, + build_update_request, ) T = TypeVar("T") @@ -59,18 +61,265 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else 
kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.WorkspaceConnectionPropertiesV2BasicResource"]: + """Lists all the available machine learning workspaces connections under the specified workspace. + + Lists all the available machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param target: Target of the workspace connection. Default value is None. + :type target: str + :param category: Category of the workspace connection. Default value is None. + :type category: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceConnectionPropertiesV2BasicResource or + the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + target=target, + category=category, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize( + "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response + ) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections" + } + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + ) -> None: + """Delete machine learning workspaces connections by name. + + Delete machine learning workspaces connections by name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } + + @distributed_trace_async + async def get( + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Lists machine learning workspaces connections by name. + + Lists machine learning workspaces connections by name. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } + @overload - async def create( + async def update( self, resource_group_name: str, workspace_name: str, connection_name: str, - parameters: _models.WorkspaceConnectionPropertiesV2BasicResource, + body: Optional[_models.WorkspaceConnectionUpdateParameter] = None, *, content_type: str = "application/json", **kwargs: Any ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: - """create. + """Update machine learning workspaces connections under the specified workspace. + + Update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -79,9 +328,8 @@ async def create( :type workspace_name: str :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. Required. 
- :type parameters: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :param body: Parameters for workspace connection update. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -92,17 +340,19 @@ async def create( """ @overload - async def create( + async def update( self, resource_group_name: str, workspace_name: str, connection_name: str, - parameters: IO, + body: Optional[IO] = None, *, content_type: str = "application/json", **kwargs: Any ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: - """create. + """Update machine learning workspaces connections under the specified workspace. + + Update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -111,8 +361,8 @@ async def create( :type workspace_name: str :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. Required. - :type parameters: IO + :param body: Parameters for workspace connection update. Default value is None. + :type body: IO :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -123,15 +373,17 @@ async def create( """ @distributed_trace_async - async def create( + async def update( self, resource_group_name: str, workspace_name: str, connection_name: str, - parameters: Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO], + body: Optional[Union[_models.WorkspaceConnectionUpdateParameter, IO]] = None, **kwargs: Any ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: - """create. + """Update machine learning workspaces connections under the specified workspace. + + Update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -140,10 +392,9 @@ async def create( :type workspace_name: str :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. Is either a - WorkspaceConnectionPropertiesV2BasicResource type or a IO type. Required. - :type parameters: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :param body: Parameters for workspace connection update. Is either a + WorkspaceConnectionUpdateParameter type or a IO type. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. 
:paramtype content_type: str @@ -170,12 +421,15 @@ async def create( content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = self._serialize.body(parameters, "WorkspaceConnectionPropertiesV2BasicResource") + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionUpdateParameter") + else: + _json = None - request = build_create_request( + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, @@ -184,7 +438,7 @@ async def create( content_type=content_type, json=_json, content=_content, - template_url=self.create.metadata["url"], + template_url=self.update.metadata["url"], headers=_headers, params=_params, ) @@ -210,15 +464,91 @@ async def create( return deserialized - create.metadata = { + update.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" } + @overload + async def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[_models.WorkspaceConnectionPropertiesV2BasicResource] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Create or update machine learning workspaces connections under the specified workspace. + + Create or update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Default value is + None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Create or update machine learning workspaces connections under the specified workspace. + + Create or update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. 
+ :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Default value is + None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + @distributed_trace_async - async def get( - self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + async def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO]] = None, + **kwargs: Any ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: - """get. + """Create or update machine learning workspaces connections under the specified workspace. + + Create or update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -227,6 +557,13 @@ async def get( :type workspace_name: str :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Is either a + WorkspaceConnectionPropertiesV2BasicResource type or a IO type. Default value is None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource @@ -240,19 +577,34 @@ async def get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) - request = build_get_request( + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionPropertiesV2BasicResource") + else: + _json = None + + request = build_create_request( resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], + content_type=content_type, + json=_json, + content=_content, + template_url=self.create.metadata["url"], headers=_headers, params=_params, ) @@ -278,15 +630,17 @@ async def get( return deserialized - get.metadata = { + create.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" } @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements + async def list_secrets( self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any - ) -> None: - """delete. + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """List all the secrets of a machine learning workspaces connections. + + List all the secrets of a machine learning workspaces connections. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -296,8 +650,8 @@ async def delete( # pylint: disable=inconsistent-return-statements :param connection_name: Friendly name of the workspace connection. Required. 
:type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None or the result of cls(response) - :rtype: None + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map = { @@ -312,15 +666,15 @@ async def delete( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) - request = build_delete_request( + request = build_list_secrets_request( resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], + template_url=self.list_secrets.metadata["url"], headers=_headers, params=_params, ) @@ -334,121 +688,18 @@ async def delete( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" - } - - @distributed_trace - def list( - self, - resource_group_name: str, - workspace_name: str, - target: Optional[str] = None, - category: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterable["_models.WorkspaceConnectionPropertiesV2BasicResource"]: - """list. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :param target: Target of the workspace connection. Default value is None. - :type target: str - :param category: Category of the workspace connection. Default value is None. 
- :type category: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceConnectionPropertiesV2BasicResource or - the result of cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] = kwargs.pop("cls", None) - - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - target=target, - category=category, - api_version=api_version, - template_url=self.list.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize( - "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response - ) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) - return pipeline_response + if cls: + return cls(pipeline_response, deserialized, {}) - return AsyncItemPaged(get_next, extract_data) + return deserialized - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections" + list_secrets.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/listsecrets" } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py index 1195fbbbd75b..4e0f8c6dba6d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py @@ -71,20 +71,30 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace_async - async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: - """Gets the properties of the specified machine learning workspace. + @distributed_trace + def list_by_subscription( + self, skip: Optional[str] = None, kind: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.Workspace"]: + """Lists all the available machine learning workspaces under the specified subscription. - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str + Lists all the available machine learning workspaces under the specified subscription. + + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param kind: Kind of workspace. Default value is None. 
+ :type kind: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.Workspace + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -93,50 +103,169 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any } error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + skip=skip, + kind=kind, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_by_subscription.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" + } + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, skip: Optional[str] = None, kind: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.Workspace"]: + """Lists all the available machine learning workspaces under the specified resource group. + + Lists all the available machine learning workspaces under the specified resource group. 
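# --- Editor's usage sketch (not part of the generated diff) ---------------
# The new workspace listing surface: list_by_subscription (shown above) and
# list_by_resource_group (continued below) both accept optional ``skip`` and
# ``kind`` arguments and return an AsyncItemPaged. The subscription ID,
# resource group, and "Default" kind value are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def list_workspaces() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            # Every workspace in the subscription, optionally filtered by kind.
            async for workspace in client.workspaces.list_by_subscription(kind="Default"):
                print(workspace.name)

            # Same shape, scoped to a single resource group; paging is transparent.
            async for workspace in client.workspaces.list_by_resource_group("my-rg", kind="Default"):
                print(workspace.id)


if __name__ == "__main__":
    asyncio.run(list_workspaces())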
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param kind: Kind of workspace. Default value is None. + :type kind: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - request = build_get_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.get.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) + def prepare_request(next_link=None): + if not next_link: - response = pipeline_response.http_response + request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + skip=skip, + kind=kind, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request - deserialized = self._deserialize("Workspace", pipeline_response) + async def extract_data(pipeline_response): + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) - if cls: - return cls(pipeline_response, deserialized, {}) + async def 
get_next(next_link=None): + request = prepare_request(next_link) - return deserialized + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_by_resource_group.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces" } - async def _create_or_update_initial( - self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any - ) -> Optional[_models.Workspace]: + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, force_to_purge: bool = False, **kwargs: Any + ) -> None: error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -145,30 +274,19 @@ async def _create_or_update_initial( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "Workspace") + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_create_or_update_request( + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, + force_to_purge=force_to_purge, api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._create_or_update_initial.metadata["url"], + template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) @@ -182,117 +300,33 @@ async def _create_or_update_initial( response = pipeline_response.http_response - if response.status_code not in [200, 202]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Workspace", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, None, {}) - _create_or_update_initial.metadata = { + _delete_initial.metadata = { "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } - @overload - async def begin_create_or_update( - self, - resource_group_name: str, - workspace_name: str, - parameters: _models.Workspace, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.Workspace]: - """Creates or updates a workspace with the specified parameters. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :param parameters: The parameters for creating or updating a machine learning workspace. - Required. - :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either Workspace or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_create_or_update( - self, - resource_group_name: str, - workspace_name: str, - parameters: IO, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncLROPoller[_models.Workspace]: - """Creates or updates a workspace with the specified parameters. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :param parameters: The parameters for creating or updating a machine learning workspace. - Required. - :type parameters: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either Workspace or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises ~azure.core.exceptions.HttpResponseError: - """ - @distributed_trace_async - async def begin_create_or_update( - self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any - ) -> AsyncLROPoller[_models.Workspace]: - """Creates or updates a workspace with the specified parameters. + async def begin_delete( + self, resource_group_name: str, workspace_name: str, force_to_purge: bool = False, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes a machine learning workspace. + + Deletes a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameters for creating or updating a machine learning workspace. Is - either a Workspace type or a IO type. Required. - :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str + :param force_to_purge: Flag to indicate delete is a purge request. Default value is False. + :type force_to_purge: bool :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -301,28 +335,24 @@ async def begin_create_or_update( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either Workspace or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._create_or_update_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, - parameters=parameters, + force_to_purge=force_to_purge, api_version=api_version, - content_type=content_type, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -330,11 +360,9 @@ async def begin_create_or_update( ) kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Workspace", pipeline_response) + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized + return cls(pipeline_response, None, {}) if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -351,13 +379,26 @@ def get_long_running_output(pipeline_response): ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = { + begin_delete.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } - async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> None: + @distributed_trace_async + async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: + """Gets the properties of the specified machine learning workspace. + + Gets the properties of the specified machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
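# --- Editor's usage sketch (not part of the generated diff) ---------------
# The delete LRO with the new ``force_to_purge`` flag, which the docstring
# above describes as marking the delete as a purge request. Resource names
# are placeholders; the poller completes with None.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def delete_workspace() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            poller = await client.workspaces.begin_delete(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                force_to_purge=True,  # defaults to False
            )
            await poller.result()  # no body on success


if __name__ == "__main__":
    asyncio.run(delete_workspace())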
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Workspace + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -370,14 +411,82 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) - request = build_delete_request( + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Workspace", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + body: Union[_models.WorkspaceUpdateParameters, IO], + **kwargs: Any + ) -> Optional[_models.Workspace]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "WorkspaceUpdateParameters") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) @@ -391,27 +500,125 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response - if response.status_code not in 
[200, 202, 204]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("Workspace", pipeline_response) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) - _delete_initial.metadata = { + return deserialized + + _update_initial.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + body: _models.WorkspaceUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: The parameters for updating a machine learning workspace. Required. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: The parameters for updating a machine learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + @distributed_trace_async - async def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> AsyncLROPoller[None]: - """Deletes a machine learning workspace. + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + body: Union[_models.WorkspaceUpdateParameters, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + Updates a machine learning workspace with the specified parameters. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str + :param body: The parameters for updating a machine learning workspace. Is either a + WorkspaceUpdateParameters type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -420,23 +627,28 @@ async def begin_delete(self, resource_group_name: str, workspace_name: str, **kw :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._update_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, + body=body, api_version=api_version, + content_type=content_type, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -444,9 +656,11 @@ async def begin_delete(self, resource_group_name: str, workspace_name: str, **kw ) kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("Workspace", pipeline_response) if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) + return deserialized if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -463,16 +677,12 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = { + begin_update.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } - async def _update_initial( - self, - resource_group_name: str, - workspace_name: str, - parameters: Union[_models.WorkspaceUpdateParameters, IO], - **kwargs: Any + async def _create_or_update_initial( + self, resource_group_name: str, workspace_name: str, body: Union[_models.Workspace, IO], **kwargs: Any ) -> Optional[_models.Workspace]: error_map = { 401: ClientAuthenticationError, @@ -492,12 +702,12 @@ async def _update_initial( content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = self._serialize.body(parameters, "WorkspaceUpdateParameters") + _json = self._serialize.body(body, "Workspace") - request = build_update_request( + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, @@ -505,7 +715,7 @@ async def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], + 
template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) @@ -525,37 +735,44 @@ async def _update_initial( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None + response_headers = {} if response.status_code == 200: deserialized = self._deserialize("Workspace", pipeline_response) + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, response_headers) return deserialized - _update_initial.metadata = { + _create_or_update_initial.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } @overload - async def begin_update( + async def begin_create_or_update( self, resource_group_name: str, workspace_name: str, - parameters: _models.WorkspaceUpdateParameters, + body: _models.Workspace, *, content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[_models.Workspace]: - """Updates a machine learning workspace with the specified parameters. + """Creates or updates a workspace with the specified parameters. + + Creates or updates a workspace with the specified parameters. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameters for updating a machine learning workspace. Required. - :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters + :param body: The parameters for creating or updating a machine learning workspace. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Workspace :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -575,24 +792,26 @@ async def begin_update( """ @overload - async def begin_update( + async def begin_create_or_update( self, resource_group_name: str, workspace_name: str, - parameters: IO, + body: IO, *, content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[_models.Workspace]: - """Updates a machine learning workspace with the specified parameters. + """Creates or updates a workspace with the specified parameters. + + Creates or updates a workspace with the specified parameters. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameters for updating a machine learning workspace. Required. - :type parameters: IO + :param body: The parameters for creating or updating a machine learning workspace. Required. + :type body: IO :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
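# --- Editor's usage sketch (not part of the generated diff) ---------------
# begin_update after the ``parameters`` -> ``body`` rename; begin_create_or_update
# (continued below) follows the same pattern with a full ``models.Workspace``
# body. The WorkspaceUpdateParameters fields used here (description, tags) are
# assumptions taken from the public model; all names are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices import models


async def update_workspace() -> None:
    async with DefaultAzureCredential() as credential:
        async with MachineLearningServicesMgmtClient(credential, "<subscription-id>") as client:
            poller = await client.workspaces.begin_update(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                body=models.WorkspaceUpdateParameters(
                    description="updated via begin_update",
                    tags={"env": "dev"},
                ),
            )
            workspace = await poller.result()
            print(workspace.name)


if __name__ == "__main__":
    asyncio.run(update_workspace())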
:paramtype content_type: str @@ -612,23 +831,21 @@ async def begin_update( """ @distributed_trace_async - async def begin_update( - self, - resource_group_name: str, - workspace_name: str, - parameters: Union[_models.WorkspaceUpdateParameters, IO], - **kwargs: Any + async def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, body: Union[_models.Workspace, IO], **kwargs: Any ) -> AsyncLROPoller[_models.Workspace]: - """Updates a machine learning workspace with the specified parameters. + """Creates or updates a workspace with the specified parameters. + + Creates or updates a workspace with the specified parameters. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameters for updating a machine learning workspace. Is either a - WorkspaceUpdateParameters type or a IO type. Required. - :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters or IO + :param body: The parameters for creating or updating a machine learning workspace. Is either a + Workspace type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Workspace or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. :paramtype content_type: str @@ -656,142 +873,51 @@ async def begin_update( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._update_initial( + raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, - parameters=parameters, + body=body, api_version=api_version, content_type=content_type, cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Workspace", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" - } - - @distributed_trace - def list_by_resource_group( - self, resource_group_name: str, skip: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.Workspace"]: - """Lists all the available machine learning workspaces under the specified resource group. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param skip: Continuation token for pagination. Default value is None. 
- :type skip: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either Workspace or the result of cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_by_resource_group_request( - resource_group_name=resource_group_name, - subscription_id=self._config.subscription_id, - skip=skip, - api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) - async def extract_data(pipeline_response): - deserialized = self._deserialize("WorkspaceListResult", pipeline_response) - list_of_elem = deserialized.value + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("Workspace", pipeline_response) if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) + return cls(pipeline_response, deserialized, {}) + return deserialized - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + 
continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces" + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } async def _diagnose_initial( self, resource_group_name: str, workspace_name: str, - parameters: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + body: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, **kwargs: Any ) -> Optional[_models.DiagnoseResponseResult]: error_map = { @@ -812,11 +938,11 @@ async def _diagnose_initial( content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters + if isinstance(body, (IOBase, bytes)): + _content = body else: - if parameters is not None: - _json = self._serialize.body(parameters, "DiagnoseWorkspaceParameters") + if body is not None: + _json = self._serialize.body(body, "DiagnoseWorkspaceParameters") else: _json = None @@ -870,7 +996,7 @@ async def begin_diagnose( self, resource_group_name: str, workspace_name: str, - parameters: Optional[_models.DiagnoseWorkspaceParameters] = None, + body: Optional[_models.DiagnoseWorkspaceParameters] = None, *, content_type: str = "application/json", **kwargs: Any @@ -884,8 +1010,8 @@ async def begin_diagnose( :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameter of diagnosing workspace health. Default value is None. - :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :param body: The parameter of diagnosing workspace health. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -909,7 +1035,7 @@ async def begin_diagnose( self, resource_group_name: str, workspace_name: str, - parameters: Optional[IO] = None, + body: Optional[IO] = None, *, content_type: str = "application/json", **kwargs: Any @@ -923,8 +1049,8 @@ async def begin_diagnose( :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameter of diagnosing workspace health. Default value is None. - :type parameters: IO + :param body: The parameter of diagnosing workspace health. Default value is None. + :type body: IO :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -948,7 +1074,7 @@ async def begin_diagnose( self, resource_group_name: str, workspace_name: str, - parameters: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + body: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, **kwargs: Any ) -> AsyncLROPoller[_models.DiagnoseResponseResult]: """Diagnose workspace setup issue. 
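A minimal usage sketch of the renamed long-running operation above: `begin_create_or_update` with its `body` parameter (formerly `begin_update` / `parameters`). This assumes the package's async client surface (`azure.mgmt.machinelearningservices.aio`) and `azure-identity`; the subscription ID, resource group, workspace name, and location are placeholders, not values from this diff.

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices.models import Workspace


async def main() -> None:
    # Placeholder identifiers; substitute a real subscription ID, resource group, and workspace name.
    async with DefaultAzureCredential() as credential, MachineLearningServicesMgmtClient(
        credential, "<subscription-id>"
    ) as client:
        # Renamed operation: begin_create_or_update now takes `body` instead of `parameters`.
        poller = await client.workspaces.begin_create_or_update(
            resource_group_name="my-rg",
            workspace_name="my-workspace",
            body=Workspace(location="eastus"),
        )
        workspace = await poller.result()
        print(workspace.provisioning_state)


asyncio.run(main())
```

The same `parameters` -> `body` rename applies to `begin_diagnose`, which now accepts an optional `DiagnoseWorkspaceParameters` instance (or a raw IO stream) as `body`.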
@@ -960,9 +1086,9 @@ async def begin_diagnose( :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameter of diagnosing workspace health. Is either a + :param body: The parameter of diagnosing workspace health. Is either a DiagnoseWorkspaceParameters type or a IO type. Default value is None. - :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters or IO + :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. :paramtype content_type: str @@ -993,7 +1119,7 @@ async def begin_diagnose( raw_result = await self._diagnose_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, - parameters=parameters, + body=body, api_version=api_version, content_type=content_type, cls=lambda x, y, z: x, @@ -1037,6 +1163,9 @@ async def list_keys( """Lists all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. + Lists all the keys associated with this workspace. This includes keys for the storage account, + app insights and password for container registry. + :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str @@ -1096,9 +1225,24 @@ async def list_keys( "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys" } - async def _resync_keys_initial( # pylint: disable=inconsistent-return-statements + @distributed_trace_async + async def list_notebook_access_token( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> None: + ) -> _models.NotebookAccessTokenResult: + """Get Azure Machine Learning Workspace notebook access token. + + Get Azure Machine Learning Workspace notebook access token. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NotebookAccessTokenResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1111,14 +1255,14 @@ async def _resync_keys_initial( # pylint: disable=inconsistent-return-statement _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[_models.NotebookAccessTokenResult] = kwargs.pop("cls", None) - request = build_resync_keys_request( + request = build_list_notebook_access_token_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._resync_keys_initial.metadata["url"], + template_url=self.list_notebook_access_token.metadata["url"], headers=_headers, params=_params, ) @@ -1132,24 +1276,29 @@ async def _resync_keys_initial( # pylint: disable=inconsistent-return-statement response = pipeline_response.http_response - if response.status_code not in [200, 202]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = self._deserialize("NotebookAccessTokenResult", pipeline_response) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) - _resync_keys_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" + return deserialized + + list_notebook_access_token.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken" } @distributed_trace_async - async def begin_resync_keys( + async def list_notebook_keys( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> AsyncLROPoller[None]: - """Resync all the keys associated with this workspace. This includes keys for the storage account, - app insights and password for container registry. + ) -> _models.ListNotebookKeysResult: + """Lists keys of Azure Machine Learning Workspaces notebook. + + Lists keys of Azure Machine Learning Workspaces notebook. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -1157,78 +1306,77 @@ async def begin_resync_keys( :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. 
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] + :return: ListNotebookKeysResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult :raises ~azure.core.exceptions.HttpResponseError: """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._resync_keys_initial( # type: ignore - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) + cls: ClsType[_models.ListNotebookKeysResult] = kwargs.pop("cls", None) - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) + request = build_list_notebook_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_notebook_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - if polling is True: - polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) - begin_resync_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ListNotebookKeysResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return 
deserialized + + list_notebook_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys" } - @distributed_trace - def list_by_subscription(self, skip: Optional[str] = None, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: - """Lists all the available machine learning workspaces under the specified subscription. + @distributed_trace_async + async def list_storage_account_keys( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListStorageAccountKeysResult: + """Lists keys of Azure Machine Learning Workspace's storage account. - :param skip: Continuation token for pagination. Default value is None. - :type skip: str + Lists keys of Azure Machine Learning Workspace's storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either Workspace or the result of cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :return: ListStorageAccountKeysResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1237,72 +1385,56 @@ def list_by_subscription(self, skip: Optional[str] = None, **kwargs: Any) -> Asy } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - request = build_list_by_subscription_request( - subscription_id=self._config.subscription_id, - skip=skip, - api_version=api_version, - template_url=self.list_by_subscription.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListStorageAccountKeysResult] = kwargs.pop("cls", None) - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + request = build_list_storage_account_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_storage_account_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - async def extract_data(pipeline_response): - deserialized = self._deserialize("WorkspaceListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) - async def get_next(next_link=None): - request = prepare_request(next_link) + response = pipeline_response.http_response - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = self._deserialize("ListStorageAccountKeysResult", pipeline_response) - return pipeline_response + if cls: + return cls(pipeline_response, deserialized, {}) - return AsyncItemPaged(get_next, extract_data) + return deserialized - list_by_subscription.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" + list_storage_account_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys" } @distributed_trace_async - async def list_notebook_access_token( + async def list_outbound_network_dependencies_endpoints( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.NotebookAccessTokenResult: - """return notebook access token and refresh token. + ) -> _models.ExternalFQDNResponse: + """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. + + Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -1310,8 +1442,8 @@ async def list_notebook_access_token( :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: NotebookAccessTokenResult or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult + :return: ExternalFQDNResponse or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse :raises ~azure.core.exceptions.HttpResponseError: """ error_map = { @@ -1326,14 +1458,14 @@ async def list_notebook_access_token( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.NotebookAccessTokenResult] = kwargs.pop("cls", None) + cls: ClsType[_models.ExternalFQDNResponse] = kwargs.pop("cls", None) - request = build_list_notebook_access_token_request( + request = build_list_outbound_network_dependencies_endpoints_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_notebook_access_token.metadata["url"], + template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"], headers=_headers, params=_params, ) @@ -1352,15 +1484,15 @@ async def list_notebook_access_token( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NotebookAccessTokenResult", pipeline_response) + deserialized = self._deserialize("ExternalFQDNResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_notebook_access_token.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken" + list_outbound_network_dependencies_endpoints.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints" } async def _prepare_notebook_initial( @@ -1405,11 +1537,16 @@ async def _prepare_notebook_initial( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None + response_headers = {} if response.status_code == 200: deserialized = self._deserialize("NotebookResourceInfo", pipeline_response) + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, response_headers) return deserialized @@ -1421,7 +1558,9 @@ async def _prepare_notebook_initial( async def begin_prepare_notebook( self, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> AsyncLROPoller[_models.NotebookResourceInfo]: - """Prepare a notebook. + """Prepare Azure Machine Learning Workspace's notebook resource. + + Prepare Azure Machine Learning Workspace's notebook resource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
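The hunks above add or relocate several plain, single-request helpers alongside the `begin_prepare_notebook` long-running operation. A sketch of how they might be called, assuming the operation group is exposed as `client.workspaces` on the async client; the method names and parameters follow the signatures in this diff, everything else is illustrative.

```python
from azure.mgmt.machinelearningservices.aio import MachineLearningServicesMgmtClient


async def show_workspace_secrets(
    client: MachineLearningServicesMgmtClient, resource_group: str, workspace: str
) -> None:
    # Single-request helpers: each returns a deserialized model, no poller involved.
    keys = await client.workspaces.list_keys(resource_group, workspace)
    notebook_token = await client.workspaces.list_notebook_access_token(resource_group, workspace)
    notebook_keys = await client.workspaces.list_notebook_keys(resource_group, workspace)
    storage_keys = await client.workspaces.list_storage_account_keys(resource_group, workspace)
    fqdns = await client.workspaces.list_outbound_network_dependencies_endpoints(resource_group, workspace)
    print(keys, notebook_token, notebook_keys, storage_keys, fqdns)

    # begin_prepare_notebook, by contrast, is a long-running operation and returns a poller.
    poller = await client.workspaces.begin_prepare_notebook(resource_group, workspace)
    notebook_info = await poller.result()
    print(notebook_info)
```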
@@ -1489,87 +1628,9 @@ def get_long_running_output(pipeline_response): "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook" } - @distributed_trace_async - async def list_storage_account_keys( - self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.ListStorageAccountKeysResult: - """List storage account keys of a workspace. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListStorageAccountKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListStorageAccountKeysResult] = kwargs.pop("cls", None) - - request = build_list_storage_account_keys_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list_storage_account_keys.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListStorageAccountKeysResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - list_storage_account_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys" - } - - @distributed_trace_async - async def list_notebook_keys( + async def _resync_keys_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.ListNotebookKeysResult: - """List keys of a notebook. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. 
- :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListNotebookKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ + ) -> None: error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1582,14 +1643,14 @@ async def list_notebook_keys( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListNotebookKeysResult] = kwargs.pop("cls", None) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_list_notebook_keys_request( + request = build_resync_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_notebook_keys.metadata["url"], + template_url=self._resync_keys_initial.metadata["url"], headers=_headers, params=_params, ) @@ -1603,31 +1664,32 @@ async def list_notebook_keys( response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ListNotebookKeysResult", pipeline_response) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, None, response_headers) - list_notebook_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys" + _resync_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" } @distributed_trace_async - async def list_outbound_network_dependencies_endpoints( + async def begin_resync_keys( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.ExternalFQDNResponse: - """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) - programmatically. + ) -> AsyncLROPoller[None]: + """Resync all the keys associated with this workspace.This includes keys for the storage account, + app insights and password for container registry. - Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) - programmatically. + Resync all the keys associated with this workspace.This includes keys for the storage account, + app insights and password for container registry. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -1635,55 +1697,58 @@ async def list_outbound_network_dependencies_endpoints( :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExternalFQDNResponse or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ExternalFQDNResponse] = kwargs.pop("cls", None) - - request = build_list_outbound_network_dependencies_endpoints_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ExternalFQDNResponse", pipeline_response) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._resync_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) - if cls: - return cls(pipeline_response, deserialized, {}) + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) - return deserialized + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + 
polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - list_outbound_network_dependencies_endpoints.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints" + begin_resync_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py index 339c533836e3..0456cddb8336 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py @@ -9,12 +9,15 @@ from ._models_py3 import AKS from ._models_py3 import AKSSchema from ._models_py3 import AKSSchemaProperties +from ._models_py3 import AccessKeyAuthTypeWorkspaceConnectionProperties from ._models_py3 import AccountKeyDatastoreCredentials from ._models_py3 import AccountKeyDatastoreSecrets from ._models_py3 import AcrDetails +from ._models_py3 import ActualCapacityInfo from ._models_py3 import AksComputeSecrets from ._models_py3 import AksComputeSecretsProperties from ._models_py3 import AksNetworkingConfiguration +from ._models_py3 import AllFeatures from ._models_py3 import AllNodes from ._models_py3 import AmlCompute from ._models_py3 import AmlComputeNodeInformation @@ -22,10 +25,11 @@ from ._models_py3 import AmlComputeProperties from ._models_py3 import AmlComputeSchema from ._models_py3 import AmlOperation -from ._models_py3 import AmlOperationDisplay from ._models_py3 import AmlOperationListResult from ._models_py3 import AmlToken +from ._models_py3 import AmlTokenComputeIdentity from ._models_py3 import AmlUserFeature +from ._models_py3 import ApiKeyAuthWorkspaceConnectionProperties from ._models_py3 import ArmResourceId from ._models_py3 import AssetBase from ._models_py3 import AssetContainer @@ -33,6 +37,7 @@ from ._models_py3 import AssetJobOutput from ._models_py3 import AssetReferenceBase from ._models_py3 import AssignedUser +from ._models_py3 import AutoDeleteSetting from ._models_py3 import AutoForecastHorizon from ._models_py3 import AutoMLJob from ._models_py3 import AutoMLVertical @@ -42,28 +47,43 @@ from ._models_py3 import AutoSeasonality from ._models_py3 import AutoTargetLags from ._models_py3 import AutoTargetRollingWindowSize +from ._models_py3 import AutologgerSettings +from ._models_py3 import AzMonMonitoringAlertNotificationSettings from ._models_py3 import AzureBlobDatastore from ._models_py3 import AzureDataLakeGen1Datastore from ._models_py3 import AzureDataLakeGen2Datastore +from ._models_py3 import AzureDatastore +from ._models_py3 import AzureDevOpsWebhook from ._models_py3 import AzureFileDatastore +from ._models_py3 import AzureMLBatchInferencingServer +from ._models_py3 import AzureMLOnlineInferencingServer from ._models_py3 import BanditPolicy +from ._models_py3 import 
BaseEnvironmentId +from ._models_py3 import BaseEnvironmentSource from ._models_py3 import BatchDeployment +from ._models_py3 import BatchDeploymentConfiguration from ._models_py3 import BatchDeploymentProperties from ._models_py3 import BatchDeploymentTrackedResourceArmPaginatedResult from ._models_py3 import BatchEndpoint from ._models_py3 import BatchEndpointDefaults from ._models_py3 import BatchEndpointProperties from ._models_py3 import BatchEndpointTrackedResourceArmPaginatedResult +from ._models_py3 import BatchPipelineComponentDeploymentConfiguration from ._models_py3 import BatchRetrySettings from ._models_py3 import BayesianSamplingAlgorithm from ._models_py3 import BindOptions from ._models_py3 import BlobReferenceForConsumptionDto from ._models_py3 import BuildContext +from ._models_py3 import CategoricalDataDriftMetricThreshold +from ._models_py3 import CategoricalDataQualityMetricThreshold +from ._models_py3 import CategoricalPredictionDriftMetricThreshold from ._models_py3 import CertificateDatastoreCredentials from ._models_py3 import CertificateDatastoreSecrets from ._models_py3 import Classification +from ._models_py3 import ClassificationModelPerformanceMetricThreshold from ._models_py3 import ClassificationTrainingSettings from ._models_py3 import ClusterUpdateParameters +from ._models_py3 import CocoExportSummary from ._models_py3 import CodeConfiguration from ._models_py3 import CodeContainer from ._models_py3 import CodeContainerProperties @@ -71,9 +91,11 @@ from ._models_py3 import CodeVersion from ._models_py3 import CodeVersionProperties from ._models_py3 import CodeVersionResourceArmPaginatedResult +from ._models_py3 import Collection from ._models_py3 import ColumnTransformer from ._models_py3 import CommandJob from ._models_py3 import CommandJobLimits +from ._models_py3 import ComponentConfiguration from ._models_py3 import ComponentContainer from ._models_py3 import ComponentContainerProperties from ._models_py3 import ComponentContainerResourceArmPaginatedResult @@ -83,6 +105,7 @@ from ._models_py3 import Compute from ._models_py3 import ComputeInstance from ._models_py3 import ComputeInstanceApplication +from ._models_py3 import ComputeInstanceAutologgerSettings from ._models_py3 import ComputeInstanceConnectivityEndpoints from ._models_py3 import ComputeInstanceContainer from ._models_py3 import ComputeInstanceCreatedBy @@ -96,38 +119,55 @@ from ._models_py3 import ComputeInstanceVersion from ._models_py3 import ComputeResource from ._models_py3 import ComputeResourceSchema +from ._models_py3 import ComputeRuntimeDto from ._models_py3 import ComputeSchedules from ._models_py3 import ComputeSecrets from ._models_py3 import ComputeStartStopSchedule from ._models_py3 import ContainerResourceRequirements from ._models_py3 import ContainerResourceSettings from ._models_py3 import CosmosDbSettings +from ._models_py3 import CreateMonitorAction from ._models_py3 import Cron from ._models_py3 import CronTrigger +from ._models_py3 import CsvExportSummary from ._models_py3 import CustomForecastHorizon +from ._models_py3 import CustomInferencingServer +from ._models_py3 import CustomKeys +from ._models_py3 import CustomKeysWorkspaceConnectionProperties +from ._models_py3 import CustomMetricThreshold from ._models_py3 import CustomModelJobInput from ._models_py3 import CustomModelJobOutput +from ._models_py3 import CustomMonitoringSignal from ._models_py3 import CustomNCrossValidations from ._models_py3 import CustomSeasonality from ._models_py3 import CustomService 
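The models `__init__.py` re-export list continues through the remaining hunks. As a quick, hedged smoke test, a few of the newly exported names (copied verbatim from this diff) should be reachable from the public `models` namespace; no constructor arguments are assumed.

```python
# Sketch: only verifies that the new names are re-exported by the generated package.
from azure.mgmt.machinelearningservices import models

for name in ("ApiKeyAuthWorkspaceConnectionProperties", "AutoDeleteSetting", "AzureDevOpsWebhook"):
    assert hasattr(models, name), f"{name} is not exported from the models namespace"
```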
from ._models_py3 import CustomTargetLags from ._models_py3 import CustomTargetRollingWindowSize +from ._models_py3 import DataCollector from ._models_py3 import DataContainer from ._models_py3 import DataContainerProperties from ._models_py3 import DataContainerResourceArmPaginatedResult +from ._models_py3 import DataDriftMetricThresholdBase +from ._models_py3 import DataDriftMonitoringSignal from ._models_py3 import DataFactory +from ._models_py3 import DataImport +from ._models_py3 import DataImportSource from ._models_py3 import DataLakeAnalytics from ._models_py3 import DataLakeAnalyticsSchema from ._models_py3 import DataLakeAnalyticsSchemaProperties from ._models_py3 import DataPathAssetReference +from ._models_py3 import DataQualityMetricThresholdBase +from ._models_py3 import DataQualityMonitoringSignal from ._models_py3 import DataVersionBase from ._models_py3 import DataVersionBaseProperties from ._models_py3 import DataVersionBaseResourceArmPaginatedResult +from ._models_py3 import DatabaseSource from ._models_py3 import Databricks from ._models_py3 import DatabricksComputeSecrets from ._models_py3 import DatabricksComputeSecretsProperties from ._models_py3 import DatabricksProperties from ._models_py3 import DatabricksSchema +from ._models_py3 import DatasetExportSummary from ._models_py3 import Datastore from ._models_py3 import DatastoreCredentials from ._models_py3 import DatastoreProperties @@ -145,8 +185,10 @@ from ._models_py3 import DistributionConfiguration from ._models_py3 import Docker from ._models_py3 import EarlyTerminationPolicy -from ._models_py3 import EncryptionKeyVaultProperties +from ._models_py3 import EmailMonitoringAlertNotificationSettings +from ._models_py3 import EncryptionKeyVaultUpdateProperties from ._models_py3 import EncryptionProperty +from ._models_py3 import EncryptionUpdateProperties from ._models_py3 import Endpoint from ._models_py3 import EndpointAuthKeys from ._models_py3 import EndpointAuthToken @@ -165,21 +207,54 @@ from ._models_py3 import ErrorResponse from ._models_py3 import EstimatedVMPrice from ._models_py3 import EstimatedVMPrices +from ._models_py3 import ExportSummary from ._models_py3 import ExternalFQDNResponse from ._models_py3 import FQDNEndpoint from ._models_py3 import FQDNEndpointDetail from ._models_py3 import FQDNEndpoints -from ._models_py3 import FQDNEndpointsProperties +from ._models_py3 import FQDNEndpointsPropertyBag +from ._models_py3 import Feature +from ._models_py3 import FeatureAttributionDriftMonitoringSignal +from ._models_py3 import FeatureAttributionMetricThreshold +from ._models_py3 import FeatureProperties +from ._models_py3 import FeatureResourceArmPaginatedResult +from ._models_py3 import FeatureStoreSettings +from ._models_py3 import FeatureSubset +from ._models_py3 import FeatureWindow +from ._models_py3 import FeaturesetContainer +from ._models_py3 import FeaturesetContainerProperties +from ._models_py3 import FeaturesetContainerResourceArmPaginatedResult +from ._models_py3 import FeaturesetSpecification +from ._models_py3 import FeaturesetVersion +from ._models_py3 import FeaturesetVersionBackfillRequest +from ._models_py3 import FeaturesetVersionBackfillResponse +from ._models_py3 import FeaturesetVersionProperties +from ._models_py3 import FeaturesetVersionResourceArmPaginatedResult +from ._models_py3 import FeaturestoreEntityContainer +from ._models_py3 import FeaturestoreEntityContainerProperties +from ._models_py3 import FeaturestoreEntityContainerResourceArmPaginatedResult +from ._models_py3 
import FeaturestoreEntityVersion +from ._models_py3 import FeaturestoreEntityVersionProperties +from ._models_py3 import FeaturestoreEntityVersionResourceArmPaginatedResult from ._models_py3 import FeaturizationSettings +from ._models_py3 import FileSystemSource +from ._models_py3 import FixedInputData from ._models_py3 import FlavorData from ._models_py3 import ForecastHorizon from ._models_py3 import Forecasting from ._models_py3 import ForecastingSettings from ._models_py3 import ForecastingTrainingSettings +from ._models_py3 import FqdnOutboundRule +from ._models_py3 import GenerationSafetyQualityMetricThreshold +from ._models_py3 import GenerationSafetyQualityMonitoringSignal +from ._models_py3 import GenerationTokenStatisticsMetricThreshold +from ._models_py3 import GenerationTokenStatisticsSignal from ._models_py3 import GridSamplingAlgorithm +from ._models_py3 import GroupStatus from ._models_py3 import HDInsight from ._models_py3 import HDInsightProperties from ._models_py3 import HDInsightSchema +from ._models_py3 import HdfsDatastore from ._models_py3 import IdAssetReference from ._models_py3 import IdentityConfiguration from ._models_py3 import IdentityForCmk @@ -201,9 +276,22 @@ from ._models_py3 import ImageObjectDetectionBase from ._models_py3 import ImageSweepSettings from ._models_py3 import ImageVertical +from ._models_py3 import ImportDataAction +from ._models_py3 import IndexColumn from ._models_py3 import InferenceContainerProperties +from ._models_py3 import InferenceEndpoint +from ._models_py3 import InferenceEndpointMinimalTrackedResource +from ._models_py3 import InferenceEndpointMinimalTrackedResourceArmPaginatedResult +from ._models_py3 import InferenceGroup +from ._models_py3 import InferenceGroupMinimalTrackedResourceWithSku +from ._models_py3 import InferenceGroupMinimalTrackedResourceWithSkuArmPaginatedResult +from ._models_py3 import InferencePool +from ._models_py3 import InferencePoolProperties +from ._models_py3 import InferencePoolTrackedResourceArmPaginatedResult +from ._models_py3 import InferencingServer from ._models_py3 import InstanceTypeSchema from ._models_py3 import InstanceTypeSchemaResources +from ._models_py3 import IntellectualProperty from ._models_py3 import JobBase from ._models_py3 import JobBaseProperties from ._models_py3 import JobBaseResourceArmPaginatedResult @@ -213,10 +301,27 @@ from ._models_py3 import JobResourceConfiguration from ._models_py3 import JobScheduleAction from ._models_py3 import JobService +from ._models_py3 import KerberosCredentials +from ._models_py3 import KerberosKeytabCredentials +from ._models_py3 import KerberosKeytabSecrets +from ._models_py3 import KerberosPasswordCredentials +from ._models_py3 import KerberosPasswordSecrets +from ._models_py3 import KeyVaultProperties from ._models_py3 import Kubernetes from ._models_py3 import KubernetesOnlineDeployment from ._models_py3 import KubernetesProperties from ._models_py3 import KubernetesSchema +from ._models_py3 import LabelCategory +from ._models_py3 import LabelClass +from ._models_py3 import LabelingDataConfiguration +from ._models_py3 import LabelingJob +from ._models_py3 import LabelingJobImageProperties +from ._models_py3 import LabelingJobInstructions +from ._models_py3 import LabelingJobMediaProperties +from ._models_py3 import LabelingJobProperties +from ._models_py3 import LabelingJobResourceArmPaginatedResult +from ._models_py3 import LabelingJobTextProperties +from ._models_py3 import LakeHouseArtifact from ._models_py3 import 
ListAmlUserFeatureResult from ._models_py3 import ListNotebookKeysResult from ._models_py3 import ListStorageAccountKeysResult @@ -224,24 +329,53 @@ from ._models_py3 import ListWorkspaceKeysResult from ._models_py3 import ListWorkspaceQuotas from ._models_py3 import LiteralJobInput +from ._models_py3 import MLAssistConfiguration +from ._models_py3 import MLAssistConfigurationDisabled +from ._models_py3 import MLAssistConfigurationEnabled from ._models_py3 import MLFlowModelJobInput from ._models_py3 import MLFlowModelJobOutput from ._models_py3 import MLTableData from ._models_py3 import MLTableJobInput from ._models_py3 import MLTableJobOutput +from ._models_py3 import ManagedComputeIdentity from ._models_py3 import ManagedIdentity from ._models_py3 import ManagedIdentityAuthTypeWorkspaceConnectionProperties +from ._models_py3 import ManagedNetworkProvisionOptions +from ._models_py3 import ManagedNetworkProvisionStatus +from ._models_py3 import ManagedNetworkSettings from ._models_py3 import ManagedOnlineDeployment from ._models_py3 import ManagedServiceIdentity +from ._models_py3 import MaterializationComputeResource +from ._models_py3 import MaterializationSettings from ._models_py3 import MedianStoppingPolicy +from ._models_py3 import ModelConfiguration from ._models_py3 import ModelContainer from ._models_py3 import ModelContainerProperties from ._models_py3 import ModelContainerResourceArmPaginatedResult +from ._models_py3 import ModelPackageInput +from ._models_py3 import ModelPerformanceMetricThresholdBase +from ._models_py3 import ModelPerformanceSignal +from ._models_py3 import ModelProfile from ._models_py3 import ModelVersion from ._models_py3 import ModelVersionProperties from ._models_py3 import ModelVersionResourceArmPaginatedResult +from ._models_py3 import MonitorComputeConfigurationBase +from ._models_py3 import MonitorComputeIdentityBase +from ._models_py3 import MonitorDefinition +from ._models_py3 import MonitorServerlessSparkCompute +from ._models_py3 import MonitoringAlertNotificationSettingsBase +from ._models_py3 import MonitoringDataSegment +from ._models_py3 import MonitoringFeatureFilterBase +from ._models_py3 import MonitoringInputDataBase +from ._models_py3 import MonitoringSignalBase +from ._models_py3 import MonitoringTarget +from ._models_py3 import MonitoringThreshold +from ._models_py3 import MonitoringWorkspaceConnection from ._models_py3 import Mpi from ._models_py3 import NCrossValidations +from ._models_py3 import NlpFixedParameters +from ._models_py3 import NlpParameterSubspace +from ._models_py3 import NlpSweepSettings from ._models_py3 import NlpVertical from ._models_py3 import NlpVerticalFeaturizationSettings from ._models_py3 import NlpVerticalLimitSettings @@ -252,24 +386,46 @@ from ._models_py3 import NotebookAccessTokenResult from ._models_py3 import NotebookPreparationError from ._models_py3 import NotebookResourceInfo +from ._models_py3 import NotificationSetting +from ._models_py3 import NumericalDataDriftMetricThreshold +from ._models_py3 import NumericalDataQualityMetricThreshold +from ._models_py3 import NumericalPredictionDriftMetricThreshold from ._models_py3 import Objective +from ._models_py3 import OneLakeArtifact +from ._models_py3 import OneLakeDatastore from ._models_py3 import OnlineDeployment from ._models_py3 import OnlineDeploymentProperties from ._models_py3 import OnlineDeploymentTrackedResourceArmPaginatedResult from ._models_py3 import OnlineEndpoint from ._models_py3 import OnlineEndpointProperties from ._models_py3 
import OnlineEndpointTrackedResourceArmPaginatedResult +from ._models_py3 import OnlineInferenceConfiguration from ._models_py3 import OnlineRequestSettings from ._models_py3 import OnlineScaleSettings +from ._models_py3 import OperationDisplay +from ._models_py3 import OsPatchingStatus +from ._models_py3 import OutboundRule +from ._models_py3 import OutboundRuleBasicResource +from ._models_py3 import OutboundRuleListResult from ._models_py3 import OutputPathAssetReference from ._models_py3 import PATAuthTypeWorkspaceConnectionProperties +from ._models_py3 import PackageInputPathBase +from ._models_py3 import PackageInputPathId +from ._models_py3 import PackageInputPathUrl +from ._models_py3 import PackageInputPathVersion +from ._models_py3 import PackageRequest +from ._models_py3 import PackageResponse from ._models_py3 import PaginatedComputeResourcesList from ._models_py3 import PartialBatchDeployment from ._models_py3 import PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties +from ._models_py3 import PartialJobBase +from ._models_py3 import PartialJobBasePartialResource from ._models_py3 import PartialManagedServiceIdentity from ._models_py3 import PartialMinimalTrackedResource from ._models_py3 import PartialMinimalTrackedResourceWithIdentity from ._models_py3 import PartialMinimalTrackedResourceWithSku +from ._models_py3 import PartialMinimalTrackedResourceWithSkuAndIdentity +from ._models_py3 import PartialNotificationSetting from ._models_py3 import PartialRegistryPartialTrackedResource from ._models_py3 import PartialSku from ._models_py3 import Password @@ -278,18 +434,29 @@ from ._models_py3 import PendingUploadResponseDto from ._models_py3 import PersonalComputeInstanceSettings from ._models_py3 import PipelineJob +from ._models_py3 import PoolEnvironmentConfiguration +from ._models_py3 import PoolModelConfiguration +from ._models_py3 import PoolStatus +from ._models_py3 import PredictionDriftMetricThresholdBase +from ._models_py3 import PredictionDriftMonitoringSignal from ._models_py3 import PrivateEndpoint from ._models_py3 import PrivateEndpointConnection from ._models_py3 import PrivateEndpointConnectionListResult +from ._models_py3 import PrivateEndpointDestination +from ._models_py3 import PrivateEndpointOutboundRule from ._models_py3 import PrivateEndpointResource from ._models_py3 import PrivateLinkResource from ._models_py3 import PrivateLinkResourceListResult from ._models_py3 import PrivateLinkServiceConnectionState from ._models_py3 import ProbeSettings +from ._models_py3 import ProgressMetrics +from ._models_py3 import PropertiesBase from ._models_py3 import PyTorch +from ._models_py3 import QueueSettings from ._models_py3 import QuotaBaseProperties from ._models_py3 import QuotaUpdateParameters from ._models_py3 import RandomSamplingAlgorithm +from ._models_py3 import Ray from ._models_py3 import Recurrence from ._models_py3 import RecurrenceSchedule from ._models_py3 import RecurrenceTrigger @@ -302,7 +469,11 @@ from ._models_py3 import RegistryRegionArmDetails from ._models_py3 import RegistryTrackedResourceArmPaginatedResult from ._models_py3 import Regression +from ._models_py3 import RegressionModelPerformanceMetricThreshold from ._models_py3 import RegressionTrainingSettings +from ._models_py3 import RequestConfiguration +from ._models_py3 import RequestLogging +from ._models_py3 import ResizeSchema from ._models_py3 import Resource from ._models_py3 import ResourceBase from ._models_py3 import ResourceConfiguration @@ -325,9 +496,16 @@ from 
._models_py3 import ScriptReference from ._models_py3 import ScriptsToExecute from ._models_py3 import Seasonality +from ._models_py3 import SecretConfiguration +from ._models_py3 import ServerlessEndpoint +from ._models_py3 import ServerlessEndpointProperties +from ._models_py3 import ServerlessEndpointTrackedResourceArmPaginatedResult from ._models_py3 import ServiceManagedResourcesSettings +from ._models_py3 import ServicePrincipalAuthTypeWorkspaceConnectionProperties from ._models_py3 import ServicePrincipalDatastoreCredentials from ._models_py3 import ServicePrincipalDatastoreSecrets +from ._models_py3 import ServiceTagDestination +from ._models_py3 import ServiceTagOutboundRule from ._models_py3 import SetupScripts from ._models_py3 import SharedPrivateLinkResource from ._models_py3 import Sku @@ -335,8 +513,15 @@ from ._models_py3 import SkuResource from ._models_py3 import SkuResourceArmPaginatedResult from ._models_py3 import SkuSetting +from ._models_py3 import SparkJob +from ._models_py3 import SparkJobEntry +from ._models_py3 import SparkJobPythonEntry +from ._models_py3 import SparkJobScalaEntry +from ._models_py3 import SparkResourceConfiguration from ._models_py3 import SslConfiguration from ._models_py3 import StackEnsembleSettings +from ._models_py3 import StaticInputData +from ._models_py3 import StatusMessage from ._models_py3 import StorageAccountDetails from ._models_py3 import SweepJob from ._models_py3 import SweepJobLimits @@ -346,6 +531,9 @@ from ._models_py3 import SystemCreatedStorageAccount from ._models_py3 import SystemData from ._models_py3 import SystemService +from ._models_py3 import TableFixedParameters +from ._models_py3 import TableParameterSubspace +from ._models_py3 import TableSweepSettings from ._models_py3 import TableVertical from ._models_py3 import TableVerticalFeaturizationSettings from ._models_py3 import TableVerticalLimitSettings @@ -357,10 +545,13 @@ from ._models_py3 import TextClassificationMultilabel from ._models_py3 import TextNer from ._models_py3 import TmpfsOptions +from ._models_py3 import TopNFeaturesByAttribution from ._models_py3 import TrackedResource +from ._models_py3 import TrailingInputData from ._models_py3 import TrainingSettings from ._models_py3 import TrialComponent from ._models_py3 import TriggerBase +from ._models_py3 import TritonInferencingServer from ._models_py3 import TritonModelJobInput from ._models_py3 import TritonModelJobOutput from ._models_py3 import TruncationSelectionPolicy @@ -391,27 +582,42 @@ from ._models_py3 import VirtualMachineSshCredentials from ._models_py3 import VolumeDefinition from ._models_py3 import VolumeOptions +from ._models_py3 import Webhook from ._models_py3 import Workspace +from ._models_py3 import WorkspaceConnectionAccessKey +from ._models_py3 import WorkspaceConnectionApiKey from ._models_py3 import WorkspaceConnectionManagedIdentity from ._models_py3 import WorkspaceConnectionPersonalAccessToken from ._models_py3 import WorkspaceConnectionPropertiesV2 from ._models_py3 import WorkspaceConnectionPropertiesV2BasicResource from ._models_py3 import WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult +from ._models_py3 import WorkspaceConnectionServicePrincipal from ._models_py3 import WorkspaceConnectionSharedAccessSignature +from ._models_py3 import WorkspaceConnectionUpdateParameter from ._models_py3 import WorkspaceConnectionUsernamePassword +from ._models_py3 import WorkspaceHubConfig from ._models_py3 import WorkspaceListResult +from ._models_py3 import 
WorkspacePrivateEndpointResource from ._models_py3 import WorkspaceUpdateParameters from ._machine_learning_services_mgmt_client_enums import AllocationState from ._machine_learning_services_mgmt_client_enums import ApplicationSharingPolicy from ._machine_learning_services_mgmt_client_enums import AssetProvisioningState +from ._machine_learning_services_mgmt_client_enums import AuthMode +from ._machine_learning_services_mgmt_client_enums import AutoDeleteCondition from ._machine_learning_services_mgmt_client_enums import AutoRebuildSetting from ._machine_learning_services_mgmt_client_enums import Autosave +from ._machine_learning_services_mgmt_client_enums import BaseEnvironmentSourceType +from ._machine_learning_services_mgmt_client_enums import BatchDeploymentConfigurationType from ._machine_learning_services_mgmt_client_enums import BatchLoggingLevel from ._machine_learning_services_mgmt_client_enums import BatchOutputAction from ._machine_learning_services_mgmt_client_enums import BillingCurrency from ._machine_learning_services_mgmt_client_enums import BlockedTransformers from ._machine_learning_services_mgmt_client_enums import Caching +from ._machine_learning_services_mgmt_client_enums import CategoricalDataDriftMetric +from ._machine_learning_services_mgmt_client_enums import CategoricalDataQualityMetric +from ._machine_learning_services_mgmt_client_enums import CategoricalPredictionDriftMetric +from ._machine_learning_services_mgmt_client_enums import ClassificationModelPerformanceMetric from ._machine_learning_services_mgmt_client_enums import ClassificationModels from ._machine_learning_services_mgmt_client_enums import ClassificationMultilabelPrimaryMetrics from ._machine_learning_services_mgmt_client_enums import ClassificationPrimaryMetrics @@ -425,6 +631,9 @@ from ._machine_learning_services_mgmt_client_enums import ContainerType from ._machine_learning_services_mgmt_client_enums import CreatedByType from ._machine_learning_services_mgmt_client_enums import CredentialsType +from ._machine_learning_services_mgmt_client_enums import DataAvailabilityStatus +from ._machine_learning_services_mgmt_client_enums import DataCollectionMode +from ._machine_learning_services_mgmt_client_enums import DataImportSourceType from ._machine_learning_services_mgmt_client_enums import DataType from ._machine_learning_services_mgmt_client_enums import DatastoreType from ._machine_learning_services_mgmt_client_enums import DeploymentProvisioningState @@ -432,6 +641,7 @@ from ._machine_learning_services_mgmt_client_enums import DistributionType from ._machine_learning_services_mgmt_client_enums import EarlyTerminationPolicyType from ._machine_learning_services_mgmt_client_enums import EgressPublicNetworkAccessType +from ._machine_learning_services_mgmt_client_enums import EmailNotificationEnableType from ._machine_learning_services_mgmt_client_enums import EncryptionStatus from ._machine_learning_services_mgmt_client_enums import EndpointAuthMode from ._machine_learning_services_mgmt_client_enums import EndpointComputeType @@ -439,35 +649,71 @@ from ._machine_learning_services_mgmt_client_enums import EndpointServiceConnectionStatus from ._machine_learning_services_mgmt_client_enums import EnvironmentType from ._machine_learning_services_mgmt_client_enums import EnvironmentVariableType +from ._machine_learning_services_mgmt_client_enums import ExportFormatType +from ._machine_learning_services_mgmt_client_enums import FeatureAttributionMetric +from ._machine_learning_services_mgmt_client_enums 
import FeatureDataType from ._machine_learning_services_mgmt_client_enums import FeatureLags from ._machine_learning_services_mgmt_client_enums import FeaturizationMode from ._machine_learning_services_mgmt_client_enums import ForecastHorizonMode from ._machine_learning_services_mgmt_client_enums import ForecastingModels from ._machine_learning_services_mgmt_client_enums import ForecastingPrimaryMetrics +from ._machine_learning_services_mgmt_client_enums import GenerationSafetyQualityMetric +from ._machine_learning_services_mgmt_client_enums import GenerationTokenStatisticsMetric from ._machine_learning_services_mgmt_client_enums import Goal from ._machine_learning_services_mgmt_client_enums import IdentityConfigurationType +from ._machine_learning_services_mgmt_client_enums import ImageAnnotationType from ._machine_learning_services_mgmt_client_enums import ImageType +from ._machine_learning_services_mgmt_client_enums import IncrementalDataRefresh +from ._machine_learning_services_mgmt_client_enums import InferencingServerType from ._machine_learning_services_mgmt_client_enums import InputDeliveryMode +from ._machine_learning_services_mgmt_client_enums import InputPathType from ._machine_learning_services_mgmt_client_enums import InstanceSegmentationPrimaryMetrics +from ._machine_learning_services_mgmt_client_enums import IsolationMode from ._machine_learning_services_mgmt_client_enums import JobInputType from ._machine_learning_services_mgmt_client_enums import JobLimitsType from ._machine_learning_services_mgmt_client_enums import JobOutputType +from ._machine_learning_services_mgmt_client_enums import JobProvisioningState from ._machine_learning_services_mgmt_client_enums import JobStatus +from ._machine_learning_services_mgmt_client_enums import JobTier from ._machine_learning_services_mgmt_client_enums import JobType from ._machine_learning_services_mgmt_client_enums import KeyType from ._machine_learning_services_mgmt_client_enums import LearningRateScheduler from ._machine_learning_services_mgmt_client_enums import ListViewType from ._machine_learning_services_mgmt_client_enums import LoadBalancerType +from ._machine_learning_services_mgmt_client_enums import LogTrainingMetrics +from ._machine_learning_services_mgmt_client_enums import LogValidationLoss from ._machine_learning_services_mgmt_client_enums import LogVerbosity +from ._machine_learning_services_mgmt_client_enums import MLAssistConfigurationType +from ._machine_learning_services_mgmt_client_enums import MLFlowAutologgerState +from ._machine_learning_services_mgmt_client_enums import ManagedNetworkStatus from ._machine_learning_services_mgmt_client_enums import ManagedServiceIdentityType +from ._machine_learning_services_mgmt_client_enums import MaterializationStoreType +from ._machine_learning_services_mgmt_client_enums import MediaType +from ._machine_learning_services_mgmt_client_enums import MlflowAutologger from ._machine_learning_services_mgmt_client_enums import ModelSize +from ._machine_learning_services_mgmt_client_enums import ModelTaskType +from ._machine_learning_services_mgmt_client_enums import MonitorComputeIdentityType +from ._machine_learning_services_mgmt_client_enums import MonitorComputeType +from ._machine_learning_services_mgmt_client_enums import MonitoringAlertNotificationType +from ._machine_learning_services_mgmt_client_enums import MonitoringFeatureDataType +from ._machine_learning_services_mgmt_client_enums import MonitoringFeatureFilterType +from ._machine_learning_services_mgmt_client_enums 
import MonitoringInputDataType +from ._machine_learning_services_mgmt_client_enums import MonitoringModelType +from ._machine_learning_services_mgmt_client_enums import MonitoringNotificationMode +from ._machine_learning_services_mgmt_client_enums import MonitoringSignalType from ._machine_learning_services_mgmt_client_enums import MountAction from ._machine_learning_services_mgmt_client_enums import MountState +from ._machine_learning_services_mgmt_client_enums import MultiSelect from ._machine_learning_services_mgmt_client_enums import NCrossValidationsMode from ._machine_learning_services_mgmt_client_enums import Network +from ._machine_learning_services_mgmt_client_enums import NlpLearningRateScheduler from ._machine_learning_services_mgmt_client_enums import NodeState from ._machine_learning_services_mgmt_client_enums import NodesValueType +from ._machine_learning_services_mgmt_client_enums import NumericalDataDriftMetric +from ._machine_learning_services_mgmt_client_enums import NumericalDataQualityMetric +from ._machine_learning_services_mgmt_client_enums import NumericalPredictionDriftMetric from ._machine_learning_services_mgmt_client_enums import ObjectDetectionPrimaryMetrics +from ._machine_learning_services_mgmt_client_enums import OneLakeArtifactType from ._machine_learning_services_mgmt_client_enums import OperatingSystemType from ._machine_learning_services_mgmt_client_enums import OperationName from ._machine_learning_services_mgmt_client_enums import OperationStatus @@ -475,22 +721,32 @@ from ._machine_learning_services_mgmt_client_enums import OrderString from ._machine_learning_services_mgmt_client_enums import OsType from ._machine_learning_services_mgmt_client_enums import OutputDeliveryMode +from ._machine_learning_services_mgmt_client_enums import PackageBuildState +from ._machine_learning_services_mgmt_client_enums import PackageInputDeliveryMode +from ._machine_learning_services_mgmt_client_enums import PackageInputType +from ._machine_learning_services_mgmt_client_enums import PatchStatus from ._machine_learning_services_mgmt_client_enums import PendingUploadCredentialType from ._machine_learning_services_mgmt_client_enums import PendingUploadType +from ._machine_learning_services_mgmt_client_enums import PoolProvisioningState from ._machine_learning_services_mgmt_client_enums import PrivateEndpointConnectionProvisioningState -from ._machine_learning_services_mgmt_client_enums import PrivateEndpointServiceConnectionStatus +from ._machine_learning_services_mgmt_client_enums import ProtectionLevel from ._machine_learning_services_mgmt_client_enums import Protocol from ._machine_learning_services_mgmt_client_enums import ProvisioningState from ._machine_learning_services_mgmt_client_enums import ProvisioningStatus -from ._machine_learning_services_mgmt_client_enums import PublicNetworkAccess from ._machine_learning_services_mgmt_client_enums import PublicNetworkAccessType from ._machine_learning_services_mgmt_client_enums import QuotaUnit from ._machine_learning_services_mgmt_client_enums import RandomSamplingAlgorithmRule from ._machine_learning_services_mgmt_client_enums import RecurrenceFrequency from ._machine_learning_services_mgmt_client_enums import ReferenceType +from ._machine_learning_services_mgmt_client_enums import RegressionModelPerformanceMetric from ._machine_learning_services_mgmt_client_enums import RegressionModels from ._machine_learning_services_mgmt_client_enums import RegressionPrimaryMetrics from ._machine_learning_services_mgmt_client_enums 
import RemoteLoginPortPublicAccess +from ._machine_learning_services_mgmt_client_enums import RollingRateType +from ._machine_learning_services_mgmt_client_enums import RuleAction +from ._machine_learning_services_mgmt_client_enums import RuleCategory +from ._machine_learning_services_mgmt_client_enums import RuleStatus +from ._machine_learning_services_mgmt_client_enums import RuleType from ._machine_learning_services_mgmt_client_enums import SamplingAlgorithmType from ._machine_learning_services_mgmt_client_enums import ScaleType from ._machine_learning_services_mgmt_client_enums import ScheduleActionType @@ -505,16 +761,20 @@ from ._machine_learning_services_mgmt_client_enums import SkuScaleType from ._machine_learning_services_mgmt_client_enums import SkuTier from ._machine_learning_services_mgmt_client_enums import SourceType +from ._machine_learning_services_mgmt_client_enums import SparkJobEntryType from ._machine_learning_services_mgmt_client_enums import SshPublicAccess from ._machine_learning_services_mgmt_client_enums import SslConfigStatus from ._machine_learning_services_mgmt_client_enums import StackMetaLearnerType from ._machine_learning_services_mgmt_client_enums import Status +from ._machine_learning_services_mgmt_client_enums import StatusMessageLevel from ._machine_learning_services_mgmt_client_enums import StochasticOptimizer from ._machine_learning_services_mgmt_client_enums import StorageAccountType from ._machine_learning_services_mgmt_client_enums import TargetAggregationFunction from ._machine_learning_services_mgmt_client_enums import TargetLagsMode from ._machine_learning_services_mgmt_client_enums import TargetRollingWindowSizeMode from ._machine_learning_services_mgmt_client_enums import TaskType +from ._machine_learning_services_mgmt_client_enums import TextAnnotationType +from ._machine_learning_services_mgmt_client_enums import TrainingMode from ._machine_learning_services_mgmt_client_enums import TriggerType from ._machine_learning_services_mgmt_client_enums import UnderlyingResourceAction from ._machine_learning_services_mgmt_client_enums import UnitOfMeasure @@ -523,9 +783,9 @@ from ._machine_learning_services_mgmt_client_enums import VMPriceOSType from ._machine_learning_services_mgmt_client_enums import VMTier from ._machine_learning_services_mgmt_client_enums import ValidationMetricType -from ._machine_learning_services_mgmt_client_enums import ValueFormat from ._machine_learning_services_mgmt_client_enums import VmPriority from ._machine_learning_services_mgmt_client_enums import VolumeDefinitionType +from ._machine_learning_services_mgmt_client_enums import WebhookType from ._machine_learning_services_mgmt_client_enums import WeekDay from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import @@ -535,12 +795,15 @@ "AKS", "AKSSchema", "AKSSchemaProperties", + "AccessKeyAuthTypeWorkspaceConnectionProperties", "AccountKeyDatastoreCredentials", "AccountKeyDatastoreSecrets", "AcrDetails", + "ActualCapacityInfo", "AksComputeSecrets", "AksComputeSecretsProperties", "AksNetworkingConfiguration", + "AllFeatures", "AllNodes", "AmlCompute", "AmlComputeNodeInformation", @@ -548,10 +811,11 @@ "AmlComputeProperties", "AmlComputeSchema", "AmlOperation", - "AmlOperationDisplay", "AmlOperationListResult", "AmlToken", + "AmlTokenComputeIdentity", "AmlUserFeature", + "ApiKeyAuthWorkspaceConnectionProperties", "ArmResourceId", "AssetBase", "AssetContainer", @@ -559,6 +823,7 @@ "AssetJobOutput", "AssetReferenceBase", 
"AssignedUser", + "AutoDeleteSetting", "AutoForecastHorizon", "AutoMLJob", "AutoMLVertical", @@ -568,28 +833,43 @@ "AutoSeasonality", "AutoTargetLags", "AutoTargetRollingWindowSize", + "AutologgerSettings", + "AzMonMonitoringAlertNotificationSettings", "AzureBlobDatastore", "AzureDataLakeGen1Datastore", "AzureDataLakeGen2Datastore", + "AzureDatastore", + "AzureDevOpsWebhook", "AzureFileDatastore", + "AzureMLBatchInferencingServer", + "AzureMLOnlineInferencingServer", "BanditPolicy", + "BaseEnvironmentId", + "BaseEnvironmentSource", "BatchDeployment", + "BatchDeploymentConfiguration", "BatchDeploymentProperties", "BatchDeploymentTrackedResourceArmPaginatedResult", "BatchEndpoint", "BatchEndpointDefaults", "BatchEndpointProperties", "BatchEndpointTrackedResourceArmPaginatedResult", + "BatchPipelineComponentDeploymentConfiguration", "BatchRetrySettings", "BayesianSamplingAlgorithm", "BindOptions", "BlobReferenceForConsumptionDto", "BuildContext", + "CategoricalDataDriftMetricThreshold", + "CategoricalDataQualityMetricThreshold", + "CategoricalPredictionDriftMetricThreshold", "CertificateDatastoreCredentials", "CertificateDatastoreSecrets", "Classification", + "ClassificationModelPerformanceMetricThreshold", "ClassificationTrainingSettings", "ClusterUpdateParameters", + "CocoExportSummary", "CodeConfiguration", "CodeContainer", "CodeContainerProperties", @@ -597,9 +877,11 @@ "CodeVersion", "CodeVersionProperties", "CodeVersionResourceArmPaginatedResult", + "Collection", "ColumnTransformer", "CommandJob", "CommandJobLimits", + "ComponentConfiguration", "ComponentContainer", "ComponentContainerProperties", "ComponentContainerResourceArmPaginatedResult", @@ -609,6 +891,7 @@ "Compute", "ComputeInstance", "ComputeInstanceApplication", + "ComputeInstanceAutologgerSettings", "ComputeInstanceConnectivityEndpoints", "ComputeInstanceContainer", "ComputeInstanceCreatedBy", @@ -622,38 +905,55 @@ "ComputeInstanceVersion", "ComputeResource", "ComputeResourceSchema", + "ComputeRuntimeDto", "ComputeSchedules", "ComputeSecrets", "ComputeStartStopSchedule", "ContainerResourceRequirements", "ContainerResourceSettings", "CosmosDbSettings", + "CreateMonitorAction", "Cron", "CronTrigger", + "CsvExportSummary", "CustomForecastHorizon", + "CustomInferencingServer", + "CustomKeys", + "CustomKeysWorkspaceConnectionProperties", + "CustomMetricThreshold", "CustomModelJobInput", "CustomModelJobOutput", + "CustomMonitoringSignal", "CustomNCrossValidations", "CustomSeasonality", "CustomService", "CustomTargetLags", "CustomTargetRollingWindowSize", + "DataCollector", "DataContainer", "DataContainerProperties", "DataContainerResourceArmPaginatedResult", + "DataDriftMetricThresholdBase", + "DataDriftMonitoringSignal", "DataFactory", + "DataImport", + "DataImportSource", "DataLakeAnalytics", "DataLakeAnalyticsSchema", "DataLakeAnalyticsSchemaProperties", "DataPathAssetReference", + "DataQualityMetricThresholdBase", + "DataQualityMonitoringSignal", "DataVersionBase", "DataVersionBaseProperties", "DataVersionBaseResourceArmPaginatedResult", + "DatabaseSource", "Databricks", "DatabricksComputeSecrets", "DatabricksComputeSecretsProperties", "DatabricksProperties", "DatabricksSchema", + "DatasetExportSummary", "Datastore", "DatastoreCredentials", "DatastoreProperties", @@ -671,8 +971,10 @@ "DistributionConfiguration", "Docker", "EarlyTerminationPolicy", - "EncryptionKeyVaultProperties", + "EmailMonitoringAlertNotificationSettings", + "EncryptionKeyVaultUpdateProperties", "EncryptionProperty", + "EncryptionUpdateProperties", 
"Endpoint", "EndpointAuthKeys", "EndpointAuthToken", @@ -691,21 +993,54 @@ "ErrorResponse", "EstimatedVMPrice", "EstimatedVMPrices", + "ExportSummary", "ExternalFQDNResponse", "FQDNEndpoint", "FQDNEndpointDetail", "FQDNEndpoints", - "FQDNEndpointsProperties", + "FQDNEndpointsPropertyBag", + "Feature", + "FeatureAttributionDriftMonitoringSignal", + "FeatureAttributionMetricThreshold", + "FeatureProperties", + "FeatureResourceArmPaginatedResult", + "FeatureStoreSettings", + "FeatureSubset", + "FeatureWindow", + "FeaturesetContainer", + "FeaturesetContainerProperties", + "FeaturesetContainerResourceArmPaginatedResult", + "FeaturesetSpecification", + "FeaturesetVersion", + "FeaturesetVersionBackfillRequest", + "FeaturesetVersionBackfillResponse", + "FeaturesetVersionProperties", + "FeaturesetVersionResourceArmPaginatedResult", + "FeaturestoreEntityContainer", + "FeaturestoreEntityContainerProperties", + "FeaturestoreEntityContainerResourceArmPaginatedResult", + "FeaturestoreEntityVersion", + "FeaturestoreEntityVersionProperties", + "FeaturestoreEntityVersionResourceArmPaginatedResult", "FeaturizationSettings", + "FileSystemSource", + "FixedInputData", "FlavorData", "ForecastHorizon", "Forecasting", "ForecastingSettings", "ForecastingTrainingSettings", + "FqdnOutboundRule", + "GenerationSafetyQualityMetricThreshold", + "GenerationSafetyQualityMonitoringSignal", + "GenerationTokenStatisticsMetricThreshold", + "GenerationTokenStatisticsSignal", "GridSamplingAlgorithm", + "GroupStatus", "HDInsight", "HDInsightProperties", "HDInsightSchema", + "HdfsDatastore", "IdAssetReference", "IdentityConfiguration", "IdentityForCmk", @@ -727,9 +1062,22 @@ "ImageObjectDetectionBase", "ImageSweepSettings", "ImageVertical", + "ImportDataAction", + "IndexColumn", "InferenceContainerProperties", + "InferenceEndpoint", + "InferenceEndpointMinimalTrackedResource", + "InferenceEndpointMinimalTrackedResourceArmPaginatedResult", + "InferenceGroup", + "InferenceGroupMinimalTrackedResourceWithSku", + "InferenceGroupMinimalTrackedResourceWithSkuArmPaginatedResult", + "InferencePool", + "InferencePoolProperties", + "InferencePoolTrackedResourceArmPaginatedResult", + "InferencingServer", "InstanceTypeSchema", "InstanceTypeSchemaResources", + "IntellectualProperty", "JobBase", "JobBaseProperties", "JobBaseResourceArmPaginatedResult", @@ -739,10 +1087,27 @@ "JobResourceConfiguration", "JobScheduleAction", "JobService", + "KerberosCredentials", + "KerberosKeytabCredentials", + "KerberosKeytabSecrets", + "KerberosPasswordCredentials", + "KerberosPasswordSecrets", + "KeyVaultProperties", "Kubernetes", "KubernetesOnlineDeployment", "KubernetesProperties", "KubernetesSchema", + "LabelCategory", + "LabelClass", + "LabelingDataConfiguration", + "LabelingJob", + "LabelingJobImageProperties", + "LabelingJobInstructions", + "LabelingJobMediaProperties", + "LabelingJobProperties", + "LabelingJobResourceArmPaginatedResult", + "LabelingJobTextProperties", + "LakeHouseArtifact", "ListAmlUserFeatureResult", "ListNotebookKeysResult", "ListStorageAccountKeysResult", @@ -750,24 +1115,53 @@ "ListWorkspaceKeysResult", "ListWorkspaceQuotas", "LiteralJobInput", + "MLAssistConfiguration", + "MLAssistConfigurationDisabled", + "MLAssistConfigurationEnabled", "MLFlowModelJobInput", "MLFlowModelJobOutput", "MLTableData", "MLTableJobInput", "MLTableJobOutput", + "ManagedComputeIdentity", "ManagedIdentity", "ManagedIdentityAuthTypeWorkspaceConnectionProperties", + "ManagedNetworkProvisionOptions", + "ManagedNetworkProvisionStatus", + 
"ManagedNetworkSettings", "ManagedOnlineDeployment", "ManagedServiceIdentity", + "MaterializationComputeResource", + "MaterializationSettings", "MedianStoppingPolicy", + "ModelConfiguration", "ModelContainer", "ModelContainerProperties", "ModelContainerResourceArmPaginatedResult", + "ModelPackageInput", + "ModelPerformanceMetricThresholdBase", + "ModelPerformanceSignal", + "ModelProfile", "ModelVersion", "ModelVersionProperties", "ModelVersionResourceArmPaginatedResult", + "MonitorComputeConfigurationBase", + "MonitorComputeIdentityBase", + "MonitorDefinition", + "MonitorServerlessSparkCompute", + "MonitoringAlertNotificationSettingsBase", + "MonitoringDataSegment", + "MonitoringFeatureFilterBase", + "MonitoringInputDataBase", + "MonitoringSignalBase", + "MonitoringTarget", + "MonitoringThreshold", + "MonitoringWorkspaceConnection", "Mpi", "NCrossValidations", + "NlpFixedParameters", + "NlpParameterSubspace", + "NlpSweepSettings", "NlpVertical", "NlpVerticalFeaturizationSettings", "NlpVerticalLimitSettings", @@ -778,24 +1172,46 @@ "NotebookAccessTokenResult", "NotebookPreparationError", "NotebookResourceInfo", + "NotificationSetting", + "NumericalDataDriftMetricThreshold", + "NumericalDataQualityMetricThreshold", + "NumericalPredictionDriftMetricThreshold", "Objective", + "OneLakeArtifact", + "OneLakeDatastore", "OnlineDeployment", "OnlineDeploymentProperties", "OnlineDeploymentTrackedResourceArmPaginatedResult", "OnlineEndpoint", "OnlineEndpointProperties", "OnlineEndpointTrackedResourceArmPaginatedResult", + "OnlineInferenceConfiguration", "OnlineRequestSettings", "OnlineScaleSettings", + "OperationDisplay", + "OsPatchingStatus", + "OutboundRule", + "OutboundRuleBasicResource", + "OutboundRuleListResult", "OutputPathAssetReference", "PATAuthTypeWorkspaceConnectionProperties", + "PackageInputPathBase", + "PackageInputPathId", + "PackageInputPathUrl", + "PackageInputPathVersion", + "PackageRequest", + "PackageResponse", "PaginatedComputeResourcesList", "PartialBatchDeployment", "PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties", + "PartialJobBase", + "PartialJobBasePartialResource", "PartialManagedServiceIdentity", "PartialMinimalTrackedResource", "PartialMinimalTrackedResourceWithIdentity", "PartialMinimalTrackedResourceWithSku", + "PartialMinimalTrackedResourceWithSkuAndIdentity", + "PartialNotificationSetting", "PartialRegistryPartialTrackedResource", "PartialSku", "Password", @@ -804,18 +1220,29 @@ "PendingUploadResponseDto", "PersonalComputeInstanceSettings", "PipelineJob", + "PoolEnvironmentConfiguration", + "PoolModelConfiguration", + "PoolStatus", + "PredictionDriftMetricThresholdBase", + "PredictionDriftMonitoringSignal", "PrivateEndpoint", "PrivateEndpointConnection", "PrivateEndpointConnectionListResult", + "PrivateEndpointDestination", + "PrivateEndpointOutboundRule", "PrivateEndpointResource", "PrivateLinkResource", "PrivateLinkResourceListResult", "PrivateLinkServiceConnectionState", "ProbeSettings", + "ProgressMetrics", + "PropertiesBase", "PyTorch", + "QueueSettings", "QuotaBaseProperties", "QuotaUpdateParameters", "RandomSamplingAlgorithm", + "Ray", "Recurrence", "RecurrenceSchedule", "RecurrenceTrigger", @@ -828,7 +1255,11 @@ "RegistryRegionArmDetails", "RegistryTrackedResourceArmPaginatedResult", "Regression", + "RegressionModelPerformanceMetricThreshold", "RegressionTrainingSettings", + "RequestConfiguration", + "RequestLogging", + "ResizeSchema", "Resource", "ResourceBase", "ResourceConfiguration", @@ -851,9 +1282,16 @@ "ScriptReference", 
"ScriptsToExecute", "Seasonality", + "SecretConfiguration", + "ServerlessEndpoint", + "ServerlessEndpointProperties", + "ServerlessEndpointTrackedResourceArmPaginatedResult", "ServiceManagedResourcesSettings", + "ServicePrincipalAuthTypeWorkspaceConnectionProperties", "ServicePrincipalDatastoreCredentials", "ServicePrincipalDatastoreSecrets", + "ServiceTagDestination", + "ServiceTagOutboundRule", "SetupScripts", "SharedPrivateLinkResource", "Sku", @@ -861,8 +1299,15 @@ "SkuResource", "SkuResourceArmPaginatedResult", "SkuSetting", + "SparkJob", + "SparkJobEntry", + "SparkJobPythonEntry", + "SparkJobScalaEntry", + "SparkResourceConfiguration", "SslConfiguration", "StackEnsembleSettings", + "StaticInputData", + "StatusMessage", "StorageAccountDetails", "SweepJob", "SweepJobLimits", @@ -872,6 +1317,9 @@ "SystemCreatedStorageAccount", "SystemData", "SystemService", + "TableFixedParameters", + "TableParameterSubspace", + "TableSweepSettings", "TableVertical", "TableVerticalFeaturizationSettings", "TableVerticalLimitSettings", @@ -883,10 +1331,13 @@ "TextClassificationMultilabel", "TextNer", "TmpfsOptions", + "TopNFeaturesByAttribution", "TrackedResource", + "TrailingInputData", "TrainingSettings", "TrialComponent", "TriggerBase", + "TritonInferencingServer", "TritonModelJobInput", "TritonModelJobOutput", "TruncationSelectionPolicy", @@ -917,26 +1368,41 @@ "VirtualMachineSshCredentials", "VolumeDefinition", "VolumeOptions", + "Webhook", "Workspace", + "WorkspaceConnectionAccessKey", + "WorkspaceConnectionApiKey", "WorkspaceConnectionManagedIdentity", "WorkspaceConnectionPersonalAccessToken", "WorkspaceConnectionPropertiesV2", "WorkspaceConnectionPropertiesV2BasicResource", "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", + "WorkspaceConnectionServicePrincipal", "WorkspaceConnectionSharedAccessSignature", + "WorkspaceConnectionUpdateParameter", "WorkspaceConnectionUsernamePassword", + "WorkspaceHubConfig", "WorkspaceListResult", + "WorkspacePrivateEndpointResource", "WorkspaceUpdateParameters", "AllocationState", "ApplicationSharingPolicy", "AssetProvisioningState", + "AuthMode", + "AutoDeleteCondition", "AutoRebuildSetting", "Autosave", + "BaseEnvironmentSourceType", + "BatchDeploymentConfigurationType", "BatchLoggingLevel", "BatchOutputAction", "BillingCurrency", "BlockedTransformers", "Caching", + "CategoricalDataDriftMetric", + "CategoricalDataQualityMetric", + "CategoricalPredictionDriftMetric", + "ClassificationModelPerformanceMetric", "ClassificationModels", "ClassificationMultilabelPrimaryMetrics", "ClassificationPrimaryMetrics", @@ -950,6 +1416,9 @@ "ContainerType", "CreatedByType", "CredentialsType", + "DataAvailabilityStatus", + "DataCollectionMode", + "DataImportSourceType", "DataType", "DatastoreType", "DeploymentProvisioningState", @@ -957,6 +1426,7 @@ "DistributionType", "EarlyTerminationPolicyType", "EgressPublicNetworkAccessType", + "EmailNotificationEnableType", "EncryptionStatus", "EndpointAuthMode", "EndpointComputeType", @@ -964,35 +1434,71 @@ "EndpointServiceConnectionStatus", "EnvironmentType", "EnvironmentVariableType", + "ExportFormatType", + "FeatureAttributionMetric", + "FeatureDataType", "FeatureLags", "FeaturizationMode", "ForecastHorizonMode", "ForecastingModels", "ForecastingPrimaryMetrics", + "GenerationSafetyQualityMetric", + "GenerationTokenStatisticsMetric", "Goal", "IdentityConfigurationType", + "ImageAnnotationType", "ImageType", + "IncrementalDataRefresh", + "InferencingServerType", "InputDeliveryMode", + "InputPathType", 
"InstanceSegmentationPrimaryMetrics", + "IsolationMode", "JobInputType", "JobLimitsType", "JobOutputType", + "JobProvisioningState", "JobStatus", + "JobTier", "JobType", "KeyType", "LearningRateScheduler", "ListViewType", "LoadBalancerType", + "LogTrainingMetrics", + "LogValidationLoss", "LogVerbosity", + "MLAssistConfigurationType", + "MLFlowAutologgerState", + "ManagedNetworkStatus", "ManagedServiceIdentityType", + "MaterializationStoreType", + "MediaType", + "MlflowAutologger", "ModelSize", + "ModelTaskType", + "MonitorComputeIdentityType", + "MonitorComputeType", + "MonitoringAlertNotificationType", + "MonitoringFeatureDataType", + "MonitoringFeatureFilterType", + "MonitoringInputDataType", + "MonitoringModelType", + "MonitoringNotificationMode", + "MonitoringSignalType", "MountAction", "MountState", + "MultiSelect", "NCrossValidationsMode", "Network", + "NlpLearningRateScheduler", "NodeState", "NodesValueType", + "NumericalDataDriftMetric", + "NumericalDataQualityMetric", + "NumericalPredictionDriftMetric", "ObjectDetectionPrimaryMetrics", + "OneLakeArtifactType", "OperatingSystemType", "OperationName", "OperationStatus", @@ -1000,22 +1506,32 @@ "OrderString", "OsType", "OutputDeliveryMode", + "PackageBuildState", + "PackageInputDeliveryMode", + "PackageInputType", + "PatchStatus", "PendingUploadCredentialType", "PendingUploadType", + "PoolProvisioningState", "PrivateEndpointConnectionProvisioningState", - "PrivateEndpointServiceConnectionStatus", + "ProtectionLevel", "Protocol", "ProvisioningState", "ProvisioningStatus", - "PublicNetworkAccess", "PublicNetworkAccessType", "QuotaUnit", "RandomSamplingAlgorithmRule", "RecurrenceFrequency", "ReferenceType", + "RegressionModelPerformanceMetric", "RegressionModels", "RegressionPrimaryMetrics", "RemoteLoginPortPublicAccess", + "RollingRateType", + "RuleAction", + "RuleCategory", + "RuleStatus", + "RuleType", "SamplingAlgorithmType", "ScaleType", "ScheduleActionType", @@ -1030,16 +1546,20 @@ "SkuScaleType", "SkuTier", "SourceType", + "SparkJobEntryType", "SshPublicAccess", "SslConfigStatus", "StackMetaLearnerType", "Status", + "StatusMessageLevel", "StochasticOptimizer", "StorageAccountType", "TargetAggregationFunction", "TargetLagsMode", "TargetRollingWindowSizeMode", "TaskType", + "TextAnnotationType", + "TrainingMode", "TriggerType", "UnderlyingResourceAction", "UnitOfMeasure", @@ -1048,9 +1568,9 @@ "VMPriceOSType", "VMTier", "ValidationMetricType", - "ValueFormat", "VmPriority", "VolumeDefinitionType", + "WebhookType", "WeekDay", ] __all__.extend([p for p in _patch_all if p not in __all__]) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_machine_learning_services_mgmt_client_enums.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_machine_learning_services_mgmt_client_enums.py index a32b5b391440..28ccdcf39982 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_machine_learning_services_mgmt_client_enums.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_machine_learning_services_mgmt_client_enums.py @@ -43,6 +43,19 @@ class AssetProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): DELETING = "Deleting" +class AuthMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine endpoint authentication mode.""" + + AAD = "AAD" + + +class AutoDeleteCondition(str, Enum, 
metaclass=CaseInsensitiveEnumMeta): + """AutoDeleteCondition.""" + + CREATED_GREATER_THAN = "CreatedGreaterThan" + LAST_ACCESSED_GREATER_THAN = "LastAccessedGreaterThan" + + class AutoRebuildSetting(str, Enum, metaclass=CaseInsensitiveEnumMeta): """AutoRebuild setting for the derived image.""" @@ -58,6 +71,19 @@ class Autosave(str, Enum, metaclass=CaseInsensitiveEnumMeta): REMOTE = "Remote" +class BaseEnvironmentSourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Base environment type.""" + + ENVIRONMENT_ASSET = "EnvironmentAsset" + + +class BatchDeploymentConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The enumerated property types for batch deployments.""" + + MODEL = "Model" + PIPELINE_COMPONENT = "PipelineComponent" + + class BatchLoggingLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Log verbosity for batch inferencing. Increasing verbosity order for logging is : Warning, Info and Debug. @@ -119,6 +145,50 @@ class Caching(str, Enum, metaclass=CaseInsensitiveEnumMeta): READ_WRITE = "ReadWrite" +class CategoricalDataDriftMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """CategoricalDataDriftMetric.""" + + JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" + """The Jensen Shannon Distance (JSD) metric.""" + POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" + """The Population Stability Index (PSI) metric.""" + PEARSONS_CHI_SQUARED_TEST = "PearsonsChiSquaredTest" + """The Pearsons Chi Squared Test metric.""" + + +class CategoricalDataQualityMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """CategoricalDataQualityMetric.""" + + NULL_VALUE_RATE = "NullValueRate" + """Calculates the rate of null values.""" + DATA_TYPE_ERROR_RATE = "DataTypeErrorRate" + """Calculates the rate of data type errors.""" + OUT_OF_BOUNDS_RATE = "OutOfBoundsRate" + """Calculates the rate values are out of bounds.""" + + +class CategoricalPredictionDriftMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """CategoricalPredictionDriftMetric.""" + + JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" + """The Jensen Shannon Distance (JSD) metric.""" + POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" + """The Population Stability Index (PSI) metric.""" + PEARSONS_CHI_SQUARED_TEST = "PearsonsChiSquaredTest" + """The Pearsons Chi Squared Test metric.""" + + +class ClassificationModelPerformanceMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ClassificationModelPerformanceMetric.""" + + ACCURACY = "Accuracy" + """Calculates the accuracy of the model predictions.""" + PRECISION = "Precision" + """Calculates the precision of the model predictions.""" + RECALL = "Recall" + """Calculates the recall of the model predictions.""" + + class ClassificationModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enum for all classification models supported by AutoML.""" @@ -166,7 +236,7 @@ class ClassificationModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): #: decision rules inferred from the data features.""" RANDOM_FOREST = "RandomForest" """Random forest is a supervised learning algorithm. - #: The "forest"\ it builds, is an ensemble of decision trees, usually trained with the “bagging”\ + #: The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging #: method. 
#: The general idea of the bagging method is that a combination of learning models increases the #: overall result.""" @@ -247,6 +317,7 @@ class ComputeInstanceState(str, Enum, metaclass=CaseInsensitiveEnumMeta): DELETING = "Deleting" RUNNING = "Running" RESTARTING = "Restarting" + RESIZING = "Resizing" JOB_RUNNING = "JobRunning" SETTING_UP = "SettingUp" SETUP_FAILED = "SetupFailed" @@ -260,7 +331,7 @@ class ComputeInstanceState(str, Enum, metaclass=CaseInsensitiveEnumMeta): class ComputePowerAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The compute power action.""" + """[Required] The compute power action.""" START = "Start" STOP = "Stop" @@ -289,6 +360,10 @@ class ConnectionAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): USERNAME_PASSWORD = "UsernamePassword" NONE = "None" SAS = "SAS" + SERVICE_PRINCIPAL = "ServicePrincipal" + ACCESS_KEY = "AccessKey" + API_KEY = "ApiKey" + CUSTOM_KEYS = "CustomKeys" class ConnectionCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -297,13 +372,30 @@ class ConnectionCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): PYTHON_FEED = "PythonFeed" CONTAINER_REGISTRY = "ContainerRegistry" GIT = "Git" + S3 = "S3" + SNOWFLAKE = "Snowflake" + AZURE_SQL_DB = "AzureSqlDb" + AZURE_SYNAPSE_ANALYTICS = "AzureSynapseAnalytics" + AZURE_MY_SQL_DB = "AzureMySqlDb" + AZURE_POSTGRES_DB = "AzurePostgresDb" + ADLS_GEN2 = "ADLSGen2" + REDIS = "Redis" + API_KEY = "ApiKey" + AZURE_OPEN_AI = "AzureOpenAI" + COGNITIVE_SEARCH = "CognitiveSearch" + COGNITIVE_SERVICE = "CognitiveService" + CUSTOM_KEYS = "CustomKeys" class ContainerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ContainerType.""" + """The type of container to retrieve logs from.""" STORAGE_INITIALIZER = "StorageInitializer" + """The container used to download models and score script.""" INFERENCE_SERVER = "InferenceServer" + """The container used to serve user's request.""" + MODEL_DATA_COLLECTOR = "ModelDataCollector" + """The container used to collect payload and custom logging when mdc is enabled.""" class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -323,6 +415,31 @@ class CredentialsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): NONE = "None" SAS = "Sas" SERVICE_PRINCIPAL = "ServicePrincipal" + KERBEROS_KEYTAB = "KerberosKeytab" + KERBEROS_PASSWORD = "KerberosPassword" + + +class DataAvailabilityStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DataAvailabilityStatus.""" + + NONE = "None" + PENDING = "Pending" + INCOMPLETE = "Incomplete" + COMPLETE = "Complete" + + +class DataCollectionMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DataCollectionMode.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class DataImportSourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of data.""" + + DATABASE = "database" + FILE_SYSTEM = "file_system" class DatastoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -332,6 +449,8 @@ class DatastoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta): AZURE_DATA_LAKE_GEN1 = "AzureDataLakeGen1" AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2" AZURE_FILE = "AzureFile" + HDFS = "Hdfs" + ONE_LAKE = "OneLake" class DataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -368,6 +487,7 @@ class DistributionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): PY_TORCH = "PyTorch" TENSOR_FLOW = "TensorFlow" MPI = "Mpi" + RAY = "Ray" class EarlyTerminationPolicyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -387,6 +507,14 @@ class 
EgressPublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta DISABLED = "Disabled" +class EmailNotificationEnableType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the email notification type.""" + + JOB_COMPLETED = "JobCompleted" + JOB_FAILED = "JobFailed" + JOB_CANCELLED = "JobCancelled" + + class EncryptionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Indicates whether or not the encryption is enabled for the workspace.""" @@ -428,6 +556,7 @@ class EndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMe PENDING = "Pending" REJECTED = "Rejected" DISCONNECTED = "Disconnected" + TIMEOUT = "Timeout" class EnvironmentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -443,6 +572,34 @@ class EnvironmentVariableType(str, Enum, metaclass=CaseInsensitiveEnumMeta): LOCAL = "local" +class ExportFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The format of exported labels.""" + + DATASET = "Dataset" + COCO = "Coco" + CSV = "CSV" + + +class FeatureAttributionMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """FeatureAttributionMetric.""" + + NORMALIZED_DISCOUNTED_CUMULATIVE_GAIN = "NormalizedDiscountedCumulativeGain" + """The Normalized Discounted Cumulative Gain metric.""" + + +class FeatureDataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """FeatureDataType.""" + + STRING = "String" + INTEGER = "Integer" + LONG = "Long" + FLOAT = "Float" + DOUBLE = "Double" + BINARY = "Binary" + DATETIME = "Datetime" + BOOLEAN = "Boolean" + + class FeatureLags(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Flag for generating lags for the numeric features.""" @@ -537,7 +694,7 @@ class ForecastingModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): #: It's an inexact but powerful technique.""" RANDOM_FOREST = "RandomForest" """Random forest is a supervised learning algorithm. - #: The "forest" it builds, is an ensemble of decision trees, usually trained with the “bagging” + #: The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging #: method. 
#: The general idea of the bagging method is that a combination of learning models increases the #: overall result.""" @@ -567,6 +724,28 @@ class ForecastingPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): #: Error (MAE) of (time) series with different scales.""" +class GenerationSafetyQualityMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Generation safety quality metric enum.""" + + ACCEPTABLE_GROUNDEDNESS_SCORE_PER_INSTANCE = "AcceptableGroundednessScorePerInstance" + AGGREGATED_GROUNDEDNESS_PASS_RATE = "AggregatedGroundednessPassRate" + ACCEPTABLE_COHERENCE_SCORE_PER_INSTANCE = "AcceptableCoherenceScorePerInstance" + AGGREGATED_COHERENCE_PASS_RATE = "AggregatedCoherencePassRate" + ACCEPTABLE_FLUENCY_SCORE_PER_INSTANCE = "AcceptableFluencyScorePerInstance" + AGGREGATED_FLUENCY_PASS_RATE = "AggregatedFluencyPassRate" + ACCEPTABLE_SIMILARITY_SCORE_PER_INSTANCE = "AcceptableSimilarityScorePerInstance" + AGGREGATED_SIMILARITY_PASS_RATE = "AggregatedSimilarityPassRate" + ACCEPTABLE_RELEVANCE_SCORE_PER_INSTANCE = "AcceptableRelevanceScorePerInstance" + AGGREGATED_RELEVANCE_PASS_RATE = "AggregatedRelevancePassRate" + + +class GenerationTokenStatisticsMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Generation token statistics metric enum.""" + + TOTAL_TOKEN_COUNT = "TotalTokenCount" + TOTAL_TOKEN_COUNT_PER_GROUP = "TotalTokenCountPerGroup" + + class Goal(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Defines supported metric goals for hyperparameter tuning.""" @@ -582,6 +761,14 @@ class IdentityConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): USER_IDENTITY = "UserIdentity" +class ImageAnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Annotation type of image data.""" + + CLASSIFICATION = "Classification" + BOUNDING_BOX = "BoundingBox" + INSTANCE_SEGMENTATION = "InstanceSegmentation" + + class ImageType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of the image. Possible values are: docker - For docker images. azureml - For AzureML images. 
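As a quick illustration (a minimal sketch, assuming the regenerated azure-mgmt-machinelearningservices package produced by this diff is installed), the newly generated enums above are exported from the models namespace and behave as case-insensitive string enums:

    # Sketch only: assumes the package regenerated by this diff is installed.
    from azure.mgmt.machinelearningservices.models import (
        ExportFormatType,
        FeatureDataType,
        GenerationSafetyQualityMetric,
    )

    # Members subclass str, so they compare equal to the REST literals shown above.
    assert ExportFormatType.COCO == "Coco"
    assert FeatureDataType.DATETIME == "Datetime"
    assert GenerationSafetyQualityMetric.AGGREGATED_GROUNDEDNESS_PASS_RATE == "AggregatedGroundednessPassRate"

    # CaseInsensitiveEnumMeta makes lookup by member name case-insensitive.
    assert ExportFormatType["coco"] is ExportFormatType.COCO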
@@ -591,6 +778,22 @@ class ImageType(str, Enum, metaclass=CaseInsensitiveEnumMeta): AZUREML = "azureml" +class IncrementalDataRefresh(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Whether IncrementalDataRefresh is enabled.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class InferencingServerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Inferencing server type for various targets.""" + + AZURE_ML_ONLINE = "AzureMLOnline" + AZURE_ML_BATCH = "AzureMLBatch" + TRITON = "Triton" + CUSTOM = "Custom" + + class InputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enum to determine the input data delivery mode.""" @@ -602,6 +805,14 @@ class InputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): EVAL_DOWNLOAD = "EvalDownload" +class InputPathType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Input path type for package inputs.""" + + URL = "Url" + PATH_ID = "PathId" + PATH_VERSION = "PathVersion" + + class InstanceSegmentationPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Primary metrics for InstanceSegmentation tasks.""" @@ -610,6 +821,14 @@ class InstanceSegmentationPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnu #: AP is calculated for each class and averaged to get the MAP.""" +class IsolationMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Isolation mode for the managed network of a machine learning workspace.""" + + DISABLED = "Disabled" + ALLOW_INTERNET_OUTBOUND = "AllowInternetOutbound" + ALLOW_ONLY_APPROVED_OUTBOUND = "AllowOnlyApprovedOutbound" + + class JobInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enum to determine the Job Input Type.""" @@ -640,6 +859,15 @@ class JobOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): TRITON_MODEL = "triton_model" +class JobProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the job provisioning state.""" + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + IN_PROGRESS = "InProgress" + + class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The status of a job.""" @@ -676,6 +904,18 @@ class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The job is paused by users. Some adjustment to labeling jobs can be made only in paused state.""" UNKNOWN = "Unknown" """Default job status if not mapped to all other statuses""" + SCHEDULED = "Scheduled" + """The job is in a scheduled state. 
Job is not in any active state.""" + + +class JobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the job tier.""" + + NULL = "Null" + SPOT = "Spot" + BASIC = "Basic" + STANDARD = "Standard" + PREMIUM = "Premium" class JobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -683,8 +923,10 @@ class JobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): AUTO_ML = "AutoML" COMMAND = "Command" + LABELING = "Labeling" SWEEP = "Sweep" PIPELINE = "Pipeline" + SPARK = "Spark" class KeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -720,6 +962,24 @@ class LoadBalancerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): INTERNAL_LOAD_BALANCER = "InternalLoadBalancer" +class LogTrainingMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LogTrainingMetrics.""" + + ENABLE = "Enable" + """Enable compute and log training metrics.""" + DISABLE = "Disable" + """Disable compute and log training metrics.""" + + +class LogValidationLoss(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LogValidationLoss.""" + + ENABLE = "Enable" + """Enable compute and log validation metrics.""" + DISABLE = "Disable" + """Disable compute and log validation metrics.""" + + class LogVerbosity(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enum for setting log verbosity.""" @@ -737,6 +997,13 @@ class LogVerbosity(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Only critical statements logged.""" +class ManagedNetworkStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status for the managed network of a machine learning workspace.""" + + INACTIVE = "Inactive" + ACTIVE = "Active" + + class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed). 
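The same pattern applies to the managed-network and job enums introduced in this hunk; a short sketch under the same assumption:

    # Sketch only: assumes the package regenerated by this diff is installed.
    from azure.mgmt.machinelearningservices.models import (
        IsolationMode,
        JobTier,
        JobType,
        ManagedNetworkStatus,
    )

    assert IsolationMode.ALLOW_ONLY_APPROVED_OUTBOUND == "AllowOnlyApprovedOutbound"
    assert ManagedNetworkStatus.ACTIVE == "Active"
    assert JobTier.SPOT == "Spot"
    assert JobType.SPARK == "Spark"  # Spark and Labeling are job types newly added in this diff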
@@ -748,6 +1015,43 @@ class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" +class MaterializationStoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MaterializationStoreType.""" + + NONE = "None" + ONLINE = "Online" + OFFLINE = "Offline" + ONLINE_AND_OFFLINE = "OnlineAndOffline" + + +class MediaType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Media type of data asset.""" + + IMAGE = "Image" + TEXT = "Text" + + +class MLAssistConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MLAssistConfigurationType.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class MlflowAutologger(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates whether mlflow autologger is enabled for notebooks.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class MLFlowAutologgerState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the state of mlflow autologger.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + class ModelSize(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Image model size.""" @@ -763,6 +1067,112 @@ class ModelSize(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Extra large size.""" +class ModelTaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Model task type enum.""" + + CLASSIFICATION = "Classification" + REGRESSION = "Regression" + QUESTION_ANSWERING = "QuestionAnswering" + + +class MonitorComputeIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Monitor compute identity type enum.""" + + AML_TOKEN = "AmlToken" + """Authenticates through user's AML token.""" + MANAGED_IDENTITY = "ManagedIdentity" + """Authenticates through a user-provided managed identity.""" + + +class MonitorComputeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Monitor compute type enum.""" + + SERVERLESS_SPARK = "ServerlessSpark" + """Serverless Spark compute.""" + + +class MonitoringAlertNotificationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringAlertNotificationType.""" + + AZURE_MONITOR = "AzureMonitor" + """Settings for Azure Monitor based alerting.""" + EMAIL = "Email" + """Settings for AML email notifications.""" + + +class MonitoringFeatureDataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringFeatureDataType.""" + + NUMERICAL = "Numerical" + """Used for features of numerical data type.""" + CATEGORICAL = "Categorical" + """Used for features of categorical data type.""" + + +class MonitoringFeatureFilterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringFeatureFilterType.""" + + ALL_FEATURES = "AllFeatures" + """Includes all features.""" + TOP_N_BY_ATTRIBUTION = "TopNByAttribution" + """Only includes the top contributing features, measured by feature attribution.""" + FEATURE_SUBSET = "FeatureSubset" + """Includes a user-defined subset of features.""" + + +class MonitoringInputDataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Monitoring input data type enum.""" + + STATIC = "Static" + """An input data with a fixed window size.""" + TRAILING = "Trailing" + """An input data which trailing relatively to the monitor's current run.""" + FIXED = "Fixed" + """An input data with tabular format which doesn't require preprocessing.""" + + +class MonitoringModelType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringModelType.""" + + CLASSIFICATION = "Classification" + """A model trained for classification tasks.""" + REGRESSION = "Regression" + """A 
model trained for regressions tasks.""" + + +class MonitoringNotificationMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringNotificationMode.""" + + DISABLED = "Disabled" + """Disabled notifications will not produce emails/metrics leveraged for alerting.""" + ENABLED = "Enabled" + """Enabled notification will produce emails/metrics leveraged for alerting.""" + + +class MonitoringSignalType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringSignalType.""" + + DATA_DRIFT = "DataDrift" + """Tracks model input data distribution change, comparing against training data or past production + #: data.""" + PREDICTION_DRIFT = "PredictionDrift" + """Tracks prediction result data distribution change, comparing against validation/test label data + #: or past production data.""" + DATA_QUALITY = "DataQuality" + """Tracks model input data integrity.""" + FEATURE_ATTRIBUTION_DRIFT = "FeatureAttributionDrift" + """Tracks feature importance change in production, comparing against feature importance at + #: training time.""" + CUSTOM = "Custom" + """Tracks a custom signal provided by users.""" + MODEL_PERFORMANCE = "ModelPerformance" + """Tracks model performance based on ground truth data.""" + GENERATION_SAFETY_QUALITY = "GenerationSafetyQuality" + """Tracks the safety and quality of generated content.""" + GENERATION_TOKEN_STATISTICS = "GenerationTokenStatistics" + """Tracks the token usage of generative endpoints.""" + + class MountAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Mount Action.""" @@ -781,6 +1191,13 @@ class MountState(str, Enum, metaclass=CaseInsensitiveEnumMeta): UNMOUNTED = "Unmounted" +class MultiSelect(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Whether multiSelect is enabled.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + class NCrossValidationsMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Determines how N-Cross validations value is determined.""" @@ -798,6 +1215,25 @@ class Network(str, Enum, metaclass=CaseInsensitiveEnumMeta): HOST = "Host" +class NlpLearningRateScheduler(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum of learning rate schedulers that aligns with those supported by HF.""" + + NONE = "None" + """No learning rate schedule.""" + LINEAR = "Linear" + """Linear warmup and decay.""" + COSINE = "Cosine" + """Linear warmup then cosine decay.""" + COSINE_WITH_RESTARTS = "CosineWithRestarts" + """Linear warmup, cosine decay, then restart to initial LR.""" + POLYNOMIAL = "Polynomial" + """Increase linearly then polynomially decay.""" + CONSTANT = "Constant" + """Constant learning rate.""" + CONSTANT_WITH_WARMUP = "ConstantWithWarmup" + """Linear warmup followed by constant value.""" + + class NodeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """State of the compute node. Values are idle, running, preparing, unusable, leaving and preempted. 
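For the model-monitoring additions, each MonitoringSignalType member corresponds to one of the signal models added elsewhere in this diff (DataDriftMonitoringSignal, PredictionDriftMonitoringSignal, and so on); a brief sketch, same assumption as above:

    # Sketch only: assumes the package regenerated by this diff is installed.
    from azure.mgmt.machinelearningservices.models import (
        MonitoringNotificationMode,
        MonitoringSignalType,
    )

    assert MonitoringSignalType.DATA_DRIFT == "DataDrift"
    assert MonitoringSignalType.GENERATION_TOKEN_STATISTICS == "GenerationTokenStatistics"
    assert MonitoringNotificationMode.ENABLED == "Enabled"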
@@ -815,6 +1251,44 @@ class NodesValueType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The enumerated types for the nodes value.""" ALL = "All" + CUSTOM = "Custom" + + +class NumericalDataDriftMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """NumericalDataDriftMetric.""" + + JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" + """The Jensen Shannon Distance (JSD) metric.""" + POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" + """The Population Stability Index (PSI) metric.""" + NORMALIZED_WASSERSTEIN_DISTANCE = "NormalizedWassersteinDistance" + """The Normalized Wasserstein Distance metric.""" + TWO_SAMPLE_KOLMOGOROV_SMIRNOV_TEST = "TwoSampleKolmogorovSmirnovTest" + """The Two Sample Kolmogorov-Smirnov Test (two-sample K–S) metric.""" + + +class NumericalDataQualityMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """NumericalDataQualityMetric.""" + + NULL_VALUE_RATE = "NullValueRate" + """Calculates the rate of null values.""" + DATA_TYPE_ERROR_RATE = "DataTypeErrorRate" + """Calculates the rate of data type errors.""" + OUT_OF_BOUNDS_RATE = "OutOfBoundsRate" + """Calculates the rate values are out of bounds.""" + + +class NumericalPredictionDriftMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """NumericalPredictionDriftMetric.""" + + JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" + """The Jensen Shannon Distance (JSD) metric.""" + POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" + """The Population Stability Index (PSI) metric.""" + NORMALIZED_WASSERSTEIN_DISTANCE = "NormalizedWassersteinDistance" + """The Normalized Wasserstein Distance metric.""" + TWO_SAMPLE_KOLMOGOROV_SMIRNOV_TEST = "TwoSampleKolmogorovSmirnovTest" + """The Two Sample Kolmogorov-Smirnov Test (two-sample K–S) metric.""" class ObjectDetectionPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -825,6 +1299,12 @@ class ObjectDetectionPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta #: AP is calculated for each class and averaged to get the MAP.""" +class OneLakeArtifactType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine OneLake artifact type.""" + + LAKE_HOUSE = "LakeHouse" + + class OperatingSystemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The type of operating system.""" @@ -839,6 +1319,7 @@ class OperationName(str, Enum, metaclass=CaseInsensitiveEnumMeta): START = "Start" STOP = "Stop" RESTART = "Restart" + RESIZE = "Resize" REIMAGE = "Reimage" DELETE = "Delete" @@ -852,6 +1333,7 @@ class OperationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): START_FAILED = "StartFailed" STOP_FAILED = "StopFailed" RESTART_FAILED = "RestartFailed" + RESIZE_FAILED = "ResizeFailed" REIMAGE_FAILED = "ReimageFailed" DELETE_FAILED = "DeleteFailed" @@ -885,6 +1367,40 @@ class OutputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): READ_WRITE_MOUNT = "ReadWriteMount" UPLOAD = "Upload" + DIRECT = "Direct" + + +class PackageBuildState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Package build state returned in package response.""" + + NOT_STARTED = "NotStarted" + RUNNING = "Running" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + + +class PackageInputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Mounting type of the model or the inputs.""" + + COPY = "Copy" + DOWNLOAD = "Download" + + +class PackageInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the inputs.""" + + URI_FILE = "UriFile" + URI_FOLDER = "UriFolder" + + +class PatchStatus(str, Enum, 
metaclass=CaseInsensitiveEnumMeta): + """The os patching status.""" + + COMPLETED_WITH_WARNINGS = "CompletedWithWarnings" + FAILED = "Failed" + IN_PROGRESS = "InProgress" + SUCCEEDED = "Succeeded" + UNKNOWN = "Unknown" class PendingUploadCredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -900,6 +1416,17 @@ class PendingUploadType(str, Enum, metaclass=CaseInsensitiveEnumMeta): TEMPORARY_BLOB_REFERENCE = "TemporaryBlobReference" +class PoolProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of pool related resources provisioning.""" + + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + UPDATING = "Updating" + CANCELED = "Canceled" + + class PrivateEndpointConnectionProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The current provisioning state.""" @@ -909,14 +1436,13 @@ class PrivateEndpointConnectionProvisioningState(str, Enum, metaclass=CaseInsens FAILED = "Failed" -class PrivateEndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The private endpoint connection status.""" +class ProtectionLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Protection level associated with the Intellectual Property.""" - PENDING = "Pending" - APPROVED = "Approved" - REJECTED = "Rejected" - DISCONNECTED = "Disconnected" - TIMEOUT = "Timeout" + ALL = "All" + """All means Intellectual Property is fully protected.""" + NONE = "None" + """None means it is not an Intellectual Property.""" class Protocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -928,8 +1454,8 @@ class Protocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The current deployment state of workspace resource. The provisioningState is to indicate states - for resource provisioning. + """The provision state of the cluster. Valid values are Unknown, Updating, Provisioning, + Succeeded, and Failed. """ UNKNOWN = "Unknown" @@ -949,13 +1475,6 @@ class ProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): FAILED = "Failed" -class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Whether requests from Public Network are allowed.""" - - ENABLED = "Enabled" - DISABLED = "Disabled" - - class PublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enum to determine whether PublicNetworkAccess is Enabled or Disabled.""" @@ -999,6 +1518,17 @@ class ReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): OUTPUT_PATH = "OutputPath" +class RegressionModelPerformanceMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """RegressionModelPerformanceMetric.""" + + MEAN_ABSOLUTE_ERROR = "MeanAbsoluteError" + """The Mean Absolute Error (MAE) metric.""" + ROOT_MEAN_SQUARED_ERROR = "RootMeanSquaredError" + """The Root Mean Squared Error (RMSE) metric.""" + MEAN_SQUARED_ERROR = "MeanSquaredError" + """The Mean Squared Error (MSE) metric.""" + + class RegressionModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enum for all Regression models supported by AutoML.""" @@ -1029,7 +1559,7 @@ class RegressionModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): #: It's an inexact but powerful technique.""" RANDOM_FOREST = "RandomForest" """Random forest is a supervised learning algorithm. - #: The "forest"\ it builds, is an ensemble of decision trees, usually trained with the “bagging”\ + #: The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging #: method. 
#: The general idea of the bagging method is that a combination of learning models increases the #: overall result.""" @@ -1073,6 +1603,46 @@ class RemoteLoginPortPublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): NOT_SPECIFIED = "NotSpecified" +class RollingRateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """RollingRateType.""" + + YEAR = "Year" + MONTH = "Month" + DAY = "Day" + HOUR = "Hour" + MINUTE = "Minute" + + +class RuleAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The action enum for networking rule.""" + + ALLOW = "Allow" + DENY = "Deny" + + +class RuleCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Category of a managed network Outbound Rule of a machine learning workspace.""" + + REQUIRED = "Required" + RECOMMENDED = "Recommended" + USER_DEFINED = "UserDefined" + + +class RuleStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of a managed network Outbound Rule of a machine learning workspace.""" + + INACTIVE = "Inactive" + ACTIVE = "Active" + + +class RuleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of a managed network Outbound Rule of a machine learning workspace.""" + + FQDN = "FQDN" + PRIVATE_ENDPOINT = "PrivateEndpoint" + SERVICE_TAG = "ServiceTag" + + class SamplingAlgorithmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """SamplingAlgorithmType.""" @@ -1093,6 +1663,8 @@ class ScheduleActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): CREATE_JOB = "CreateJob" INVOKE_BATCH_ENDPOINT = "InvokeBatchEndpoint" + IMPORT_DATA = "ImportData" + CREATE_MONITOR = "CreateMonitor" class ScheduleListViewType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -1145,6 +1717,8 @@ class SecretsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): CERTIFICATE = "Certificate" SAS = "Sas" SERVICE_PRINCIPAL = "ServicePrincipal" + KERBEROS_PASSWORD = "KerberosPassword" + KERBEROS_KEYTAB = "KerberosKeytab" class ServiceDataAccessAuthIdentity(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -1202,6 +1776,13 @@ class SourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): URI = "URI" +class SparkJobEntryType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SparkJobEntryType.""" + + SPARK_JOB_PYTHON_ENTRY = "SparkJobPythonEntry" + SPARK_JOB_SCALA_ENTRY = "SparkJobScalaEntry" + + class SshPublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh port is closed on this instance. 
Enabled - Indicates that the public ssh port is open and @@ -1256,6 +1837,14 @@ class Status(str, Enum, metaclass=CaseInsensitiveEnumMeta): OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion" +class StatusMessageLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """StatusMessageLevel.""" + + ERROR = "Error" + INFORMATION = "Information" + WARNING = "Warning" + + class StochasticOptimizer(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Stochastic optimizer for image models.""" @@ -1349,6 +1938,24 @@ class TaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): #: occurrences of entities such as people, locations, organizations, and more.""" +class TextAnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Annotation type of text data.""" + + CLASSIFICATION = "Classification" + NAMED_ENTITY_RECOGNITION = "NamedEntityRecognition" + + +class TrainingMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Training mode dictates whether to use distributed training or not.""" + + AUTO = "Auto" + """Auto mode""" + DISTRIBUTED = "Distributed" + """Distributed training mode""" + NON_DISTRIBUTED = "NonDistributed" + """Non distributed training mode""" + + class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """TriggerType.""" @@ -1397,12 +2004,6 @@ class ValidationMetricType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """CocoVoc metric.""" -class ValueFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """format for the workspace connection value.""" - - JSON = "JSON" - - class VMPriceOSType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Operating system type used by the VM.""" @@ -1434,6 +2035,12 @@ class VolumeDefinitionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): NPIPE = "npipe" +class WebhookType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the webhook callback service type.""" + + AZURE_DEV_OPS = "AzureDevOps" + + class WeekDay(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enum of weekday.""" diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py index 736189ee6853..fc843dee9674 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py @@ -24,17 +24,172 @@ JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +class WorkspaceConnectionPropertiesV2(_serialization.Model): + """WorkspaceConnectionPropertiesV2. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AccessKeyAuthTypeWorkspaceConnectionProperties, ApiKeyAuthWorkspaceConnectionProperties, + CustomKeysWorkspaceConnectionProperties, ManagedIdentityAuthTypeWorkspaceConnectionProperties, + NoneAuthTypeWorkspaceConnectionProperties, PATAuthTypeWorkspaceConnectionProperties, + SASAuthTypeWorkspaceConnectionProperties, + ServicePrincipalAuthTypeWorkspaceConnectionProperties, + UsernamePasswordAuthTypeWorkspaceConnectionProperties + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". 
+ :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. + :vartype metadata: JSON + :ivar target: + :vartype target: str + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + } + + _subtype_map = { + "auth_type": { + "AccessKey": "AccessKeyAuthTypeWorkspaceConnectionProperties", + "ApiKey": "ApiKeyAuthWorkspaceConnectionProperties", + "CustomKeys": "CustomKeysWorkspaceConnectionProperties", + "ManagedIdentity": "ManagedIdentityAuthTypeWorkspaceConnectionProperties", + "None": "NoneAuthTypeWorkspaceConnectionProperties", + "PAT": "PATAuthTypeWorkspaceConnectionProperties", + "SAS": "SASAuthTypeWorkspaceConnectionProperties", + "ServicePrincipal": "ServicePrincipalAuthTypeWorkspaceConnectionProperties", + "UsernamePassword": "UsernamePasswordAuthTypeWorkspaceConnectionProperties", + } + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, + target: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. + :paramtype metadata: JSON + :keyword target: + :paramtype target: str + """ + super().__init__(**kwargs) + self.auth_type: Optional[str] = None + self.category = category + self.expiry_time = expiry_time + self.metadata = metadata + self.target = target + + +class AccessKeyAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """AccessKeyAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". 
+ :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. + :vartype metadata: JSON + :ivar target: + :vartype target: str + :ivar credentials: + :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionAccessKey + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionAccessKey"}, + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, + target: Optional[str] = None, + credentials: Optional["_models.WorkspaceConnectionAccessKey"] = None, + **kwargs: Any + ) -> None: + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. + :paramtype metadata: JSON + :keyword target: + :paramtype target: str + :keyword credentials: + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionAccessKey + """ + super().__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) + self.auth_type: str = "AccessKey" + self.credentials = credentials + + class DatastoreCredentials(_serialization.Model): """Base definition for datastore credentials. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AccountKeyDatastoreCredentials, CertificateDatastoreCredentials, NoneDatastoreCredentials, - SasDatastoreCredentials, ServicePrincipalDatastoreCredentials + AccountKeyDatastoreCredentials, CertificateDatastoreCredentials, KerberosKeytabCredentials, + KerberosPasswordCredentials, NoneDatastoreCredentials, SasDatastoreCredentials, + ServicePrincipalDatastoreCredentials All required parameters must be populated in order to send to Azure. :ivar credentials_type: [Required] Credential type used to authentication with storage. - Required. Known values are: "AccountKey", "Certificate", "None", "Sas", and "ServicePrincipal". + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". 
:vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType """ @@ -50,6 +205,8 @@ class DatastoreCredentials(_serialization.Model): "credentials_type": { "AccountKey": "AccountKeyDatastoreCredentials", "Certificate": "CertificateDatastoreCredentials", + "KerberosKeytab": "KerberosKeytabCredentials", + "KerberosPassword": "KerberosPasswordCredentials", "None": "NoneDatastoreCredentials", "Sas": "SasDatastoreCredentials", "ServicePrincipal": "ServicePrincipalDatastoreCredentials", @@ -68,7 +225,8 @@ class AccountKeyDatastoreCredentials(DatastoreCredentials): All required parameters must be populated in order to send to Azure. :ivar credentials_type: [Required] Credential type used to authentication with storage. - Required. Known values are: "AccountKey", "Certificate", "None", "Sas", and "ServicePrincipal". + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType :ivar secrets: [Required] Storage account secrets. Required. :vartype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets @@ -98,13 +256,14 @@ class DatastoreSecrets(_serialization.Model): """Base definition for datastore secrets. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, SasDatastoreSecrets, - ServicePrincipalDatastoreSecrets + AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, KerberosKeytabSecrets, + KerberosPasswordSecrets, SasDatastoreSecrets, ServicePrincipalDatastoreSecrets All required parameters must be populated in order to send to Azure. :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. - Known values are: "AccountKey", "Certificate", "Sas", and "ServicePrincipal". + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType """ @@ -120,6 +279,8 @@ class DatastoreSecrets(_serialization.Model): "secrets_type": { "AccountKey": "AccountKeyDatastoreSecrets", "Certificate": "CertificateDatastoreSecrets", + "KerberosKeytab": "KerberosKeytabSecrets", + "KerberosPassword": "KerberosPasswordSecrets", "Sas": "SasDatastoreSecrets", "ServicePrincipal": "ServicePrincipalDatastoreSecrets", } @@ -137,7 +298,8 @@ class AccountKeyDatastoreSecrets(DatastoreSecrets): All required parameters must be populated in order to send to Azure. :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. - Known values are: "AccountKey", "Certificate", "Sas", and "ServicePrincipal". + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType :ivar key: Storage account key. :vartype key: str @@ -202,6 +364,44 @@ def __init__( self.user_created_acr_account = user_created_acr_account +class ActualCapacityInfo(_serialization.Model): + """ActualCapacityInfo. + + :ivar allocated: Gets or sets the total number of instances for the group. + :vartype allocated: int + :ivar assignment_failed: Gets or sets the number of instances which failed to successfully + complete assignment. 
+ :vartype assignment_failed: int + :ivar assignment_success: Gets or sets the number of instances which successfully completed + assignment. + :vartype assignment_success: int + """ + + _attribute_map = { + "allocated": {"key": "allocated", "type": "int"}, + "assignment_failed": {"key": "assignmentFailed", "type": "int"}, + "assignment_success": {"key": "assignmentSuccess", "type": "int"}, + } + + def __init__( + self, *, allocated: int = 0, assignment_failed: int = 0, assignment_success: int = 0, **kwargs: Any + ) -> None: + """ + :keyword allocated: Gets or sets the total number of instances for the group. + :paramtype allocated: int + :keyword assignment_failed: Gets or sets the number of instances which failed to successfully + complete assignment. + :paramtype assignment_failed: int + :keyword assignment_success: Gets or sets the number of instances which successfully completed + assignment. + :paramtype assignment_success: int + """ + super().__init__(**kwargs) + self.allocated = allocated + self.assignment_failed = assignment_failed + self.assignment_success = assignment_success + + class AKSSchema(_serialization.Model): """AKSSchema. @@ -728,6 +928,69 @@ def __init__( self.load_balancer_subnet = load_balancer_subnet +class MonitoringFeatureFilterBase(_serialization.Model): + """MonitoringFeatureFilterBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AllFeatures, FeatureSubset, TopNFeaturesByAttribution + + All required parameters must be populated in order to send to Azure. + + :ivar filter_type: [Required] Specifies the feature filter to leverage when selecting features + to calculate metrics over. Required. Known values are: "AllFeatures", "TopNByAttribution", and + "FeatureSubset". + :vartype filter_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType + """ + + _validation = { + "filter_type": {"required": True}, + } + + _attribute_map = { + "filter_type": {"key": "filterType", "type": "str"}, + } + + _subtype_map = { + "filter_type": { + "AllFeatures": "AllFeatures", + "FeatureSubset": "FeatureSubset", + "TopNByAttribution": "TopNFeaturesByAttribution", + } + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.filter_type: Optional[str] = None + + +class AllFeatures(MonitoringFeatureFilterBase): + """AllFeatures. + + All required parameters must be populated in order to send to Azure. + + :ivar filter_type: [Required] Specifies the feature filter to leverage when selecting features + to calculate metrics over. Required. Known values are: "AllFeatures", "TopNByAttribution", and + "FeatureSubset". + :vartype filter_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType + """ + + _validation = { + "filter_type": {"required": True}, + } + + _attribute_map = { + "filter_type": {"key": "filterType", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.filter_type: str = "AllFeatures" + + class Nodes(_serialization.Model): """Abstract Nodes definition. @@ -736,7 +999,8 @@ class Nodes(_serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar nodes_value_type: [Required] Type of the Nodes value. Required. "All" + :ivar nodes_value_type: [Required] Type of the Nodes value. Required. Known values are: "All" + and "Custom". 
:vartype nodes_value_type: str or ~azure.mgmt.machinelearningservices.models.NodesValueType """ @@ -761,7 +1025,8 @@ class AllNodes(Nodes): All required parameters must be populated in order to send to Azure. - :ivar nodes_value_type: [Required] Type of the Nodes value. Required. "All" + :ivar nodes_value_type: [Required] Type of the Nodes value. Required. Known values are: "All" + and "Custom". :vartype nodes_value_type: str or ~azure.mgmt.machinelearningservices.models.NodesValueType """ @@ -1148,94 +1413,56 @@ def __init__( class AmlOperation(_serialization.Model): - """Azure Machine Learning workspace REST API operation. + """Azure Machine Learning team account REST API operation. - :ivar name: Operation name: {provider}/{resource}/{operation}. - :vartype name: str - :ivar display: Display name of operation. - :vartype display: ~azure.mgmt.machinelearningservices.models.AmlOperationDisplay + :ivar display: Gets or sets display name of operation. + :vartype display: ~azure.mgmt.machinelearningservices.models.OperationDisplay :ivar is_data_action: Indicates whether the operation applies to data-plane. :vartype is_data_action: bool + :ivar name: Gets or sets operation name: {provider}/{resource}/{operation}. + :vartype name: str + :ivar origin: The intended executor of the operation: user/system. + :vartype origin: str """ _attribute_map = { - "name": {"key": "name", "type": "str"}, - "display": {"key": "display", "type": "AmlOperationDisplay"}, + "display": {"key": "display", "type": "OperationDisplay"}, "is_data_action": {"key": "isDataAction", "type": "bool"}, + "name": {"key": "name", "type": "str"}, + "origin": {"key": "origin", "type": "str"}, } def __init__( self, *, - name: Optional[str] = None, - display: Optional["_models.AmlOperationDisplay"] = None, + display: Optional["_models.OperationDisplay"] = None, is_data_action: Optional[bool] = None, + name: Optional[str] = None, + origin: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword name: Operation name: {provider}/{resource}/{operation}. - :paramtype name: str - :keyword display: Display name of operation. - :paramtype display: ~azure.mgmt.machinelearningservices.models.AmlOperationDisplay + :keyword display: Gets or sets display name of operation. + :paramtype display: ~azure.mgmt.machinelearningservices.models.OperationDisplay :keyword is_data_action: Indicates whether the operation applies to data-plane. :paramtype is_data_action: bool + :keyword name: Gets or sets operation name: {provider}/{resource}/{operation}. + :paramtype name: str + :keyword origin: The intended executor of the operation: user/system. + :paramtype origin: str """ super().__init__(**kwargs) - self.name = name self.display = display self.is_data_action = is_data_action - - -class AmlOperationDisplay(_serialization.Model): - """Display name of operation. - - :ivar provider: The resource provider name: Microsoft.MachineLearningExperimentation. - :vartype provider: str - :ivar resource: The resource on which the operation is performed. - :vartype resource: str - :ivar operation: The operation that users can perform. - :vartype operation: str - :ivar description: The description for the operation. 
- :vartype description: str - """ - - _attribute_map = { - "provider": {"key": "provider", "type": "str"}, - "resource": {"key": "resource", "type": "str"}, - "operation": {"key": "operation", "type": "str"}, - "description": {"key": "description", "type": "str"}, - } - - def __init__( - self, - *, - provider: Optional[str] = None, - resource: Optional[str] = None, - operation: Optional[str] = None, - description: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword provider: The resource provider name: Microsoft.MachineLearningExperimentation. - :paramtype provider: str - :keyword resource: The resource on which the operation is performed. - :paramtype resource: str - :keyword operation: The operation that users can perform. - :paramtype operation: str - :keyword description: The description for the operation. - :paramtype description: str - """ - super().__init__(**kwargs) - self.provider = provider - self.resource = resource - self.operation = operation - self.description = description + self.name = name + self.origin = origin class AmlOperationListResult(_serialization.Model): """An array of operations supported by the resource provider. - :ivar value: List of AML workspace operations supported by the AML workspace resource provider. + :ivar value: Gets or sets list of AML team account operations supported by the + AML team account resource provider. :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] """ @@ -1245,8 +1472,8 @@ class AmlOperationListResult(_serialization.Model): def __init__(self, *, value: Optional[List["_models.AmlOperation"]] = None, **kwargs: Any) -> None: """ - :keyword value: List of AML workspace operations supported by the AML workspace resource - provider. + :keyword value: Gets or sets list of AML team account operations supported by the + AML team account resource provider. :paramtype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] """ super().__init__(**kwargs) @@ -1310,6 +1537,63 @@ def __init__(self, **kwargs: Any) -> None: self.identity_type: str = "AMLToken" +class MonitorComputeIdentityBase(_serialization.Model): + """Monitor compute identity base definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmlTokenComputeIdentity, ManagedComputeIdentity + + All required parameters must be populated in order to send to Azure. + + :ivar compute_identity_type: [Required] Monitor compute identity type enum. Required. Known + values are: "AmlToken" and "ManagedIdentity". + :vartype compute_identity_type: str or + ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityType + """ + + _validation = { + "compute_identity_type": {"required": True}, + } + + _attribute_map = { + "compute_identity_type": {"key": "computeIdentityType", "type": "str"}, + } + + _subtype_map = { + "compute_identity_type": {"AmlToken": "AmlTokenComputeIdentity", "ManagedIdentity": "ManagedComputeIdentity"} + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.compute_identity_type: Optional[str] = None + + +class AmlTokenComputeIdentity(MonitorComputeIdentityBase): + """AML token compute identity definition. + + All required parameters must be populated in order to send to Azure. + + :ivar compute_identity_type: [Required] Monitor compute identity type enum. Required. Known + values are: "AmlToken" and "ManagedIdentity". 
+ :vartype compute_identity_type: str or + ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityType + """ + + _validation = { + "compute_identity_type": {"required": True}, + } + + _attribute_map = { + "compute_identity_type": {"key": "computeIdentityType", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.compute_identity_type: str = "AmlToken" + + class AmlUserFeature(_serialization.Model): """Features enabled for a workspace. @@ -1349,34 +1633,123 @@ def __init__( self.description = description -class ArmResourceId(_serialization.Model): - """ARM ResourceId of a resource. - - :ivar resource_id: Arm ResourceId is in the format - "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" - or - "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{AcrName}". - :vartype resource_id: str - """ +class ApiKeyAuthWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """This connection type covers the generic ApiKey auth connection categories, for examples: + AzureOpenAI: + Category:= AzureOpenAI + AuthType:= ApiKey (as type discriminator) + Credentials:= {ApiKey} as + Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey + Target:= {ApiBase} - _attribute_map = { - "resource_id": {"key": "resourceId", "type": "str"}, - } + CognitiveService: + Category:= CognitiveService + AuthType:= ApiKey (as type discriminator) + Credentials:= {SubscriptionKey} as + Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey + Target:= ServiceRegion={serviceRegion} - def __init__(self, *, resource_id: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword resource_id: Arm ResourceId is in the format - "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" - or - "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{AcrName}". - :paramtype resource_id: str - """ - super().__init__(**kwargs) - self.resource_id = resource_id + CognitiveSearch: + Category:= CognitiveSearch + AuthType:= ApiKey (as type discriminator) + Credentials:= {Key} as + Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey + Target:= {Endpoint} + Use Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields. -class ResourceBase(_serialization.Model): - """ResourceBase. + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. 
+ :vartype metadata: JSON + :ivar target: + :vartype target: str + :ivar credentials: Api key object for workspace connection credential. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionApiKey + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionApiKey"}, + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, + target: Optional[str] = None, + credentials: Optional["_models.WorkspaceConnectionApiKey"] = None, + **kwargs: Any + ) -> None: + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. + :paramtype metadata: JSON + :keyword target: + :paramtype target: str + :keyword credentials: Api key object for workspace connection credential. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionApiKey + """ + super().__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) + self.auth_type: str = "ApiKey" + self.credentials = credentials + + +class ArmResourceId(_serialization.Model): + """ARM ResourceId of a resource. + + :ivar resource_id: Arm ResourceId is in the format + "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" + or + "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{AcrName}". + :vartype resource_id: str + """ + + _attribute_map = { + "resource_id": {"key": "resourceId", "type": "str"}, + } + + def __init__(self, *, resource_id: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword resource_id: Arm ResourceId is in the format + "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" + or + "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{AcrName}". + :paramtype resource_id: str + """ + super().__init__(**kwargs) + self.resource_id = resource_id + + +class ResourceBase(_serialization.Model): + """ResourceBase. :ivar description: The asset description text. :vartype description: str @@ -1423,9 +1796,13 @@ class AssetBase(ResourceBase): :vartype properties: dict[str, str] :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar is_anonymous: If the name version are system generated (anonymous registration). + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. 
+ :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived?. + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :vartype is_archived: bool """ @@ -1433,6 +1810,7 @@ class AssetBase(ResourceBase): "description": {"key": "description", "type": "str"}, "properties": {"key": "properties", "type": "{str}"}, "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "is_anonymous": {"key": "isAnonymous", "type": "bool"}, "is_archived": {"key": "isArchived", "type": "bool"}, } @@ -1443,6 +1821,7 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, is_anonymous: bool = False, is_archived: bool = False, **kwargs: Any @@ -1454,12 +1833,17 @@ def __init__( :paramtype properties: dict[str, str] :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword is_anonymous: If the name version are system generated (anonymous registration). + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived?. + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :paramtype is_archived: bool """ super().__init__(description=description, properties=properties, tags=tags, **kwargs) + self.auto_delete_setting = auto_delete_setting self.is_anonymous = is_anonymous self.is_archived = is_archived @@ -1561,13 +1945,23 @@ def __init__( class AssetJobOutput(_serialization.Model): """Asset output type. - :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :ivar asset_name: Output Asset Name. + :vartype asset_name: str + :ivar asset_version: Output Asset Version. + :vartype asset_version: str + :ivar auto_delete_setting: Auto delete setting of output data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. 
:vartype uri: str """ _attribute_map = { + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "mode": {"key": "mode", "type": "str"}, "uri": {"key": "uri", "type": "str"}, } @@ -1575,17 +1969,30 @@ class AssetJobOutput(_serialization.Model): def __init__( self, *, + asset_name: Optional[str] = None, + asset_version: Optional[str] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :keyword asset_name: Output Asset Name. + :paramtype asset_name: str + :keyword asset_version: Output Asset Version. + :paramtype asset_version: str + :keyword auto_delete_setting: Auto delete setting of output data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str """ super().__init__(**kwargs) + self.asset_name = asset_name + self.asset_version = asset_version + self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri @@ -1658,6 +2065,40 @@ def __init__(self, *, object_id: str, tenant_id: str, **kwargs: Any) -> None: self.tenant_id = tenant_id +class AutoDeleteSetting(_serialization.Model): + """AutoDeleteSetting. + + :ivar condition: When to check if an asset is expired. Known values are: "CreatedGreaterThan" + and "LastAccessedGreaterThan". + :vartype condition: str or ~azure.mgmt.machinelearningservices.models.AutoDeleteCondition + :ivar value: Expiration condition value. + :vartype value: str + """ + + _attribute_map = { + "condition": {"key": "condition", "type": "str"}, + "value": {"key": "value", "type": "str"}, + } + + def __init__( + self, + *, + condition: Optional[Union[str, "_models.AutoDeleteCondition"]] = None, + value: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword condition: When to check if an asset is expired. Known values are: + "CreatedGreaterThan" and "LastAccessedGreaterThan". + :paramtype condition: str or ~azure.mgmt.machinelearningservices.models.AutoDeleteCondition + :keyword value: Expiration condition value. + :paramtype value: str + """ + super().__init__(**kwargs) + self.condition = condition + self.value = value + + class ForecastHorizon(_serialization.Model): """The desired maximum forecast horizon in units of time-series frequency. @@ -1711,11 +2152,41 @@ def __init__(self, **kwargs: Any) -> None: self.mode: str = "Auto" +class AutologgerSettings(_serialization.Model): + """Settings for Autologger. + + All required parameters must be populated in order to send to Azure. + + :ivar mlflow_autologger: [Required] Indicates whether mlflow autologger is enabled. Required. + Known values are: "Enabled" and "Disabled". 
+ :vartype mlflow_autologger: str or + ~azure.mgmt.machinelearningservices.models.MLFlowAutologgerState + """ + + _validation = { + "mlflow_autologger": {"required": True}, + } + + _attribute_map = { + "mlflow_autologger": {"key": "mlflowAutologger", "type": "str"}, + } + + def __init__(self, *, mlflow_autologger: Union[str, "_models.MLFlowAutologgerState"], **kwargs: Any) -> None: + """ + :keyword mlflow_autologger: [Required] Indicates whether mlflow autologger is enabled. + Required. Known values are: "Enabled" and "Disabled". + :paramtype mlflow_autologger: str or + ~azure.mgmt.machinelearningservices.models.MLFlowAutologgerState + """ + super().__init__(**kwargs) + self.mlflow_autologger = mlflow_autologger + + class JobBaseProperties(ResourceBase): # pylint: disable=too-many-instance-attributes """Base definition for a job. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AutoMLJob, CommandJob, PipelineJob, SweepJob + AutoMLJob, CommandJob, LabelingJobProperties, PipelineJob, SparkJob, SweepJob Variables are only populated by the server, and will be ignored when sending a request. @@ -1743,14 +2214,19 @@ class JobBaseProperties(ResourceBase): # pylint: disable=too-many-instance-attr :ivar is_archived: Is the asset archived?. :vartype is_archived: bool :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", - "Command", "Sweep", and "Pipeline". + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar notification_setting: Notification setting for the job. + :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :ivar secrets_configuration: Configuration for secrets to be made available during runtime. + :vartype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", - "Canceled", "NotResponding", "Paused", and "Unknown". + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". 
:vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus """ @@ -1770,12 +2246,21 @@ class JobBaseProperties(ResourceBase): # pylint: disable=too-many-instance-attr "identity": {"key": "identity", "type": "IdentityConfiguration"}, "is_archived": {"key": "isArchived", "type": "bool"}, "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, "services": {"key": "services", "type": "{JobService}"}, "status": {"key": "status", "type": "str"}, } _subtype_map = { - "job_type": {"AutoML": "AutoMLJob", "Command": "CommandJob", "Pipeline": "PipelineJob", "Sweep": "SweepJob"} + "job_type": { + "AutoML": "AutoMLJob", + "Command": "CommandJob", + "Labeling": "LabelingJobProperties", + "Pipeline": "PipelineJob", + "Spark": "SparkJob", + "Sweep": "SweepJob", + } } def __init__( @@ -1790,6 +2275,8 @@ def __init__( experiment_name: str = "Default", identity: Optional["_models.IdentityConfiguration"] = None, is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, services: Optional[Dict[str, "_models.JobService"]] = None, **kwargs: Any ) -> None: @@ -1815,6 +2302,11 @@ def __init__( :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool + :keyword notification_setting: Notification setting for the job. + :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :keyword secrets_configuration: Configuration for secrets to be made available during runtime. + :paramtype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :keyword services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] @@ -1827,6 +2319,8 @@ def __init__( self.identity = identity self.is_archived = is_archived self.job_type: Optional[str] = None + self.notification_setting = notification_setting + self.secrets_configuration = secrets_configuration self.services = services self.status = None @@ -1862,14 +2356,19 @@ class AutoMLJob(JobBaseProperties): # pylint: disable=too-many-instance-attribu :ivar is_archived: Is the asset archived?. :vartype is_archived: bool :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", - "Command", "Sweep", and "Pipeline". + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar notification_setting: Notification setting for the job. + :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :ivar secrets_configuration: Configuration for secrets to be made available during runtime. + :vartype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] :ivar status: Status of the job. 
Known values are: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", - "Canceled", "NotResponding", "Paused", and "Unknown". + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus :ivar environment_id: The ARM resource ID of the Environment specification for the job. This is optional value to provide, if not provided, AutoML will default this to Production @@ -1879,6 +2378,8 @@ class AutoMLJob(JobBaseProperties): # pylint: disable=too-many-instance-attribu :vartype environment_variables: dict[str, str] :ivar outputs: Mapping of output data bindings used in the job. :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar queue_settings: Queue settings for the job. + :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings :ivar resources: Compute Resource configuration for the job. :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration :ivar task_details: [Required] This represents scenario which can be one of Tables/NLP/Image. @@ -1903,11 +2404,14 @@ class AutoMLJob(JobBaseProperties): # pylint: disable=too-many-instance-attribu "identity": {"key": "identity", "type": "IdentityConfiguration"}, "is_archived": {"key": "isArchived", "type": "bool"}, "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, "services": {"key": "services", "type": "{JobService}"}, "status": {"key": "status", "type": "str"}, "environment_id": {"key": "environmentId", "type": "str"}, "environment_variables": {"key": "environmentVariables", "type": "{str}"}, "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, "resources": {"key": "resources", "type": "JobResourceConfiguration"}, "task_details": {"key": "taskDetails", "type": "AutoMLVertical"}, } @@ -1925,10 +2429,13 @@ def __init__( experiment_name: str = "Default", identity: Optional["_models.IdentityConfiguration"] = None, is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, services: Optional[Dict[str, "_models.JobService"]] = None, environment_id: Optional[str] = None, environment_variables: Optional[Dict[str, str]] = None, outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + queue_settings: Optional["_models.QueueSettings"] = None, resources: Optional["_models.JobResourceConfiguration"] = None, **kwargs: Any ) -> None: @@ -1954,6 +2461,11 @@ def __init__( :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool + :keyword notification_setting: Notification setting for the job. + :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :keyword secrets_configuration: Configuration for secrets to be made available during runtime. + :paramtype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :keyword services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. 
:paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] @@ -1965,6 +2477,8 @@ def __init__( :paramtype environment_variables: dict[str, str] :keyword outputs: Mapping of output data bindings used in the job. :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword queue_settings: Queue settings for the job. + :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings :keyword resources: Compute Resource configuration for the job. :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration :keyword task_details: [Required] This represents scenario which can be one of @@ -1981,6 +2495,8 @@ def __init__( experiment_name=experiment_name, identity=identity, is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, services=services, **kwargs ) @@ -1988,6 +2504,7 @@ def __init__( self.environment_id = environment_id self.environment_variables = environment_variables self.outputs = outputs + self.queue_settings = queue_settings self.resources = resources self.task_details = task_details @@ -2347,11 +2864,72 @@ def __init__(self, **kwargs: Any) -> None: self.mode: str = "Auto" +class MonitoringAlertNotificationSettingsBase(_serialization.Model): + """MonitoringAlertNotificationSettingsBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzMonMonitoringAlertNotificationSettings, EmailMonitoringAlertNotificationSettings + + All required parameters must be populated in order to send to Azure. + + :ivar alert_notification_type: [Required] Specifies the type of signal to monitor. Required. + Known values are: "AzureMonitor" and "Email". + :vartype alert_notification_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationType + """ + + _validation = { + "alert_notification_type": {"required": True}, + } + + _attribute_map = { + "alert_notification_type": {"key": "alertNotificationType", "type": "str"}, + } + + _subtype_map = { + "alert_notification_type": { + "AzureMonitor": "AzMonMonitoringAlertNotificationSettings", + "Email": "EmailMonitoringAlertNotificationSettings", + } + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.alert_notification_type: Optional[str] = None + + +class AzMonMonitoringAlertNotificationSettings(MonitoringAlertNotificationSettingsBase): + """AzMonMonitoringAlertNotificationSettings. + + All required parameters must be populated in order to send to Azure. + + :ivar alert_notification_type: [Required] Specifies the type of signal to monitor. Required. + Known values are: "AzureMonitor" and "Email". + :vartype alert_notification_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationType + """ + + _validation = { + "alert_notification_type": {"required": True}, + } + + _attribute_map = { + "alert_notification_type": {"key": "alertNotificationType", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.alert_notification_type: str = "AzureMonitor" + + class DatastoreProperties(ResourceBase): """Base definition for datastore contents configuration. You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - AzureBlobDatastore, AzureDataLakeGen1Datastore, AzureDataLakeGen2Datastore, AzureFileDatastore + AzureBlobDatastore, AzureDataLakeGen1Datastore, AzureDataLakeGen2Datastore, AzureFileDatastore, + HdfsDatastore, OneLakeDatastore Variables are only populated by the server, and will be ignored when sending a request. @@ -2366,8 +2944,10 @@ class DatastoreProperties(ResourceBase): :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values - are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", and "AzureFile". + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar intellectual_property: Intellectual Property details. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar is_default: Readonly property to indicate if datastore is the workspace default datastore. :vartype is_default: bool @@ -2385,6 +2965,7 @@ class DatastoreProperties(ResourceBase): "tags": {"key": "tags", "type": "{str}"}, "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, "is_default": {"key": "isDefault", "type": "bool"}, } @@ -2394,6 +2975,8 @@ class DatastoreProperties(ResourceBase): "AzureDataLakeGen1": "AzureDataLakeGen1Datastore", "AzureDataLakeGen2": "AzureDataLakeGen2Datastore", "AzureFile": "AzureFileDatastore", + "Hdfs": "HdfsDatastore", + "OneLake": "OneLakeDatastore", } } @@ -2404,6 +2987,7 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, **kwargs: Any ) -> None: """ @@ -2415,14 +2999,46 @@ def __init__( :paramtype tags: dict[str, str] :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword intellectual_property: Intellectual Property details. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty """ super().__init__(description=description, properties=properties, tags=tags, **kwargs) self.credentials = credentials self.datastore_type: Optional[str] = None + self.intellectual_property = intellectual_property self.is_default = None -class AzureBlobDatastore(DatastoreProperties): # pylint: disable=too-many-instance-attributes +class AzureDatastore(_serialization.Model): + """Base definition for Azure datastore contents configuration. + + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + """ + + _attribute_map = { + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + } + + def __init__( + self, *, resource_group: Optional[str] = None, subscription_id: Optional[str] = None, **kwargs: Any + ) -> None: + """ + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. 
+ :paramtype subscription_id: str + """ + super().__init__(**kwargs) + self.resource_group = resource_group + self.subscription_id = subscription_id + + +class AzureBlobDatastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes """Azure Blob datastore configuration. Variables are only populated by the server, and will be ignored when sending a request. @@ -2438,11 +3054,17 @@ class AzureBlobDatastore(DatastoreProperties): # pylint: disable=too-many-insta :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values - are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", and "AzureFile". + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar intellectual_property: Intellectual Property details. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar is_default: Readonly property to indicate if datastore is the workspace default datastore. :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str :ivar account_name: Storage account name. :vartype account_name: str :ivar container_name: Storage account container name. @@ -2470,7 +3092,10 @@ class AzureBlobDatastore(DatastoreProperties): # pylint: disable=too-many-insta "tags": {"key": "tags", "type": "{str}"}, "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, "account_name": {"key": "accountName", "type": "str"}, "container_name": {"key": "containerName", "type": "str"}, "endpoint": {"key": "endpoint", "type": "str"}, @@ -2485,6 +3110,9 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, account_name: Optional[str] = None, container_name: Optional[str] = None, endpoint: Optional[str] = None, @@ -2501,6 +3129,13 @@ def __init__( :paramtype tags: dict[str, str] :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword intellectual_property: Intellectual Property details. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str :keyword account_name: Storage account name. :paramtype account_name: str :keyword container_name: Storage account container name. 
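(Editor's note, not part of the diff: the regenerated AzureBlobDatastore accepts the new resource_group, subscription_id, and intellectual_property keywords alongside the existing credential settings. Below is a minimal construction sketch against the 2023-08-01-preview models; the AccountKeyDatastoreCredentials/AccountKeyDatastoreSecrets classes are not shown in this hunk and all values are placeholders.)

from azure.mgmt.machinelearningservices.models import (
    AccountKeyDatastoreCredentials,
    AccountKeyDatastoreSecrets,
    AzureBlobDatastore,
)

# Placeholder values; substitute real account details before use.
blob_store = AzureBlobDatastore(
    description="Raw telemetry container",
    account_name="examplestorageacct",
    container_name="telemetry",
    endpoint="core.windows.net",
    protocol="https",
    # New in this API version: the owning Azure scope can be recorded on the datastore itself.
    subscription_id="00000000-0000-0000-0000-000000000000",
    resource_group="example-rg",
    credentials=AccountKeyDatastoreCredentials(
        secrets=AccountKeyDatastoreSecrets(key="<storage-account-key>")
    ),
)
print(blob_store.datastore_type)  # "AzureBlob", set by the subclass constructor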
@@ -2515,16 +3150,33 @@ def __init__( :paramtype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ - super().__init__(description=description, properties=properties, tags=tags, credentials=credentials, **kwargs) + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials self.datastore_type: str = "AzureBlob" + self.intellectual_property = intellectual_property + self.is_default = None self.account_name = account_name self.container_name = container_name self.endpoint = endpoint self.protocol = protocol self.service_data_access_auth_identity = service_data_access_auth_identity + self.resource_group = resource_group + self.subscription_id = subscription_id -class AzureDataLakeGen1Datastore(DatastoreProperties): +class AzureDataLakeGen1Datastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes """Azure Data Lake Gen1 datastore configuration. Variables are only populated by the server, and will be ignored when sending a request. @@ -2540,11 +3192,17 @@ class AzureDataLakeGen1Datastore(DatastoreProperties): :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values - are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", and "AzureFile". + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar intellectual_property: Intellectual Property details. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar is_default: Readonly property to indicate if datastore is the workspace default datastore. :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate service data access to customer's storage. Known values are: "None", "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". 
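(Editor's note, not part of the diff: the datastore models now derive from both AzureDatastore and DatastoreProperties, so existing isinstance checks keep working while the new Azure-scoped fields are available on the same object. A short illustrative check, assuming the regenerated classes are exported from the models package as usual; NoneDatastoreCredentials is not shown in this hunk and the store name is a placeholder.)

from azure.mgmt.machinelearningservices.models import (
    AzureDatastore,
    AzureDataLakeGen1Datastore,
    DatastoreProperties,
    NoneDatastoreCredentials,
)

adls_gen1 = AzureDataLakeGen1Datastore(
    store_name="exampleadlsstore",            # placeholder store name
    credentials=NoneDatastoreCredentials(),    # identity-based access, no stored secret
    resource_group="example-rg",
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# The regenerated class participates in both hierarchies.
assert isinstance(adls_gen1, DatastoreProperties)
assert isinstance(adls_gen1, AzureDatastore)
print(adls_gen1.datastore_type)  # "AzureDataLakeGen1"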
@@ -2567,7 +3225,10 @@ class AzureDataLakeGen1Datastore(DatastoreProperties): "tags": {"key": "tags", "type": "{str}"}, "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, "store_name": {"key": "storeName", "type": "str"}, } @@ -2580,6 +3241,9 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, **kwargs: Any ) -> None: @@ -2592,6 +3256,13 @@ def __init__( :paramtype tags: dict[str, str] :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword intellectual_property: Intellectual Property details. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate service data access to customer's storage. Known values are: "None", "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". @@ -2600,13 +3271,30 @@ def __init__( :keyword store_name: [Required] Azure Data Lake store name. Required. :paramtype store_name: str """ - super().__init__(description=description, properties=properties, tags=tags, credentials=credentials, **kwargs) + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials self.datastore_type: str = "AzureDataLakeGen1" + self.intellectual_property = intellectual_property + self.is_default = None self.service_data_access_auth_identity = service_data_access_auth_identity self.store_name = store_name + self.resource_group = resource_group + self.subscription_id = subscription_id -class AzureDataLakeGen2Datastore(DatastoreProperties): # pylint: disable=too-many-instance-attributes +class AzureDataLakeGen2Datastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes """Azure Data Lake Gen2 datastore configuration. Variables are only populated by the server, and will be ignored when sending a request. @@ -2622,11 +3310,17 @@ class AzureDataLakeGen2Datastore(DatastoreProperties): # pylint: disable=too-ma :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :ivar datastore_type: [Required] Storage type backing the datastore. Required. 
Known values - are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", and "AzureFile". + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar intellectual_property: Intellectual Property details. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar is_default: Readonly property to indicate if datastore is the workspace default datastore. :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str :ivar account_name: [Required] Storage account name. Required. :vartype account_name: str :ivar endpoint: Azure cloud endpoint for the storage account. @@ -2656,7 +3350,10 @@ class AzureDataLakeGen2Datastore(DatastoreProperties): # pylint: disable=too-ma "tags": {"key": "tags", "type": "{str}"}, "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, "account_name": {"key": "accountName", "type": "str"}, "endpoint": {"key": "endpoint", "type": "str"}, "filesystem": {"key": "filesystem", "type": "str"}, @@ -2673,6 +3370,9 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, endpoint: Optional[str] = None, protocol: Optional[str] = None, service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, @@ -2687,6 +3387,13 @@ def __init__( :paramtype tags: dict[str, str] :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword intellectual_property: Intellectual Property details. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str :keyword account_name: [Required] Storage account name. Required. :paramtype account_name: str :keyword endpoint: Azure cloud endpoint for the storage account. 
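(Editor's note, not part of the diff: the attribute maps above also define how the new fields are serialized. A hedged sketch that builds a Gen2 datastore and inspects the REST payload keys; Model.serialize() and NoneDatastoreCredentials come from the generated package rather than this hunk, and account/filesystem names are placeholders.)

from azure.mgmt.machinelearningservices.models import (
    AzureDataLakeGen2Datastore,
    NoneDatastoreCredentials,
)

gen2 = AzureDataLakeGen2Datastore(
    account_name="exampledatalake",
    filesystem="curated",
    credentials=NoneDatastoreCredentials(),
    resource_group="example-rg",
    subscription_id="00000000-0000-0000-0000-000000000000",
    service_data_access_auth_identity="WorkspaceSystemAssignedIdentity",
)

body = gen2.serialize()  # wire shape driven by the _attribute_map entries shown above
# Expect the discriminator plus the newly mapped camelCase keys, e.g.:
#   body["datastoreType"]   == "AzureDataLakeGen2"
#   body["resourceGroup"]   == "example-rg"
#   body["subscriptionId"]  == "00000000-0000-0000-0000-000000000000"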
@@ -2701,49 +3408,138 @@ def __init__( :paramtype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ - super().__init__(description=description, properties=properties, tags=tags, credentials=credentials, **kwargs) + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials self.datastore_type: str = "AzureDataLakeGen2" + self.intellectual_property = intellectual_property + self.is_default = None self.account_name = account_name self.endpoint = endpoint self.filesystem = filesystem self.protocol = protocol self.service_data_access_auth_identity = service_data_access_auth_identity + self.resource_group = resource_group + self.subscription_id = subscription_id -class AzureFileDatastore(DatastoreProperties): # pylint: disable=too-many-instance-attributes - """Azure File datastore configuration. +class Webhook(_serialization.Model): + """Webhook base. - Variables are only populated by the server, and will be ignored when sending a request. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureDevOpsWebhook All required parameters must be populated in order to send to Azure. - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar credentials: [Required] Account credentials. Required. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values - are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", and "AzureFile". - :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType - :ivar is_default: Readonly property to indicate if datastore is the workspace default - datastore. - :vartype is_default: bool - :ivar account_name: [Required] Storage account name. Required. - :vartype account_name: str - :ivar endpoint: Azure cloud endpoint for the storage account. - :vartype endpoint: str - :ivar file_share_name: [Required] The name of the Azure file share that the datastore points - to. Required. - :vartype file_share_name: str - :ivar protocol: Protocol used to communicate with the storage account. - :vartype protocol: str - :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Known values are: "None", - "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". - :vartype service_data_access_auth_identity: str or + :ivar event_type: Send callback on a specified notification event. + :vartype event_type: str + :ivar webhook_type: [Required] Specifies the type of service to send a callback. Required. 
+ "AzureDevOps" + :vartype webhook_type: str or ~azure.mgmt.machinelearningservices.models.WebhookType + """ + + _validation = { + "webhook_type": {"required": True}, + } + + _attribute_map = { + "event_type": {"key": "eventType", "type": "str"}, + "webhook_type": {"key": "webhookType", "type": "str"}, + } + + _subtype_map = {"webhook_type": {"AzureDevOps": "AzureDevOpsWebhook"}} + + def __init__(self, *, event_type: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword event_type: Send callback on a specified notification event. + :paramtype event_type: str + """ + super().__init__(**kwargs) + self.event_type = event_type + self.webhook_type: Optional[str] = None + + +class AzureDevOpsWebhook(Webhook): + """Webhook details specific for Azure DevOps. + + All required parameters must be populated in order to send to Azure. + + :ivar event_type: Send callback on a specified notification event. + :vartype event_type: str + :ivar webhook_type: [Required] Specifies the type of service to send a callback. Required. + "AzureDevOps" + :vartype webhook_type: str or ~azure.mgmt.machinelearningservices.models.WebhookType + """ + + _validation = { + "webhook_type": {"required": True}, + } + + _attribute_map = { + "event_type": {"key": "eventType", "type": "str"}, + "webhook_type": {"key": "webhookType", "type": "str"}, + } + + def __init__(self, *, event_type: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword event_type: Send callback on a specified notification event. + :paramtype event_type: str + """ + super().__init__(event_type=event_type, **kwargs) + self.webhook_type: str = "AzureDevOps" + + +class AzureFileDatastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes + """Azure File datastore configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: [Required] Account credentials. Required. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar intellectual_property: Intellectual Property details. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar account_name: [Required] Storage account name. Required. + :vartype account_name: str + :ivar endpoint: Azure cloud endpoint for the storage account. + :vartype endpoint: str + :ivar file_share_name: [Required] The name of the Azure file share that the datastore points + to. Required. + :vartype file_share_name: str + :ivar protocol: Protocol used to communicate with the storage account. 
+ :vartype protocol: str + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ @@ -2761,7 +3557,10 @@ class AzureFileDatastore(DatastoreProperties): # pylint: disable=too-many-insta "tags": {"key": "tags", "type": "{str}"}, "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, "account_name": {"key": "accountName", "type": "str"}, "endpoint": {"key": "endpoint", "type": "str"}, "file_share_name": {"key": "fileShareName", "type": "str"}, @@ -2778,6 +3577,9 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, endpoint: Optional[str] = None, protocol: Optional[str] = None, service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, @@ -2792,6 +3594,13 @@ def __init__( :paramtype tags: dict[str, str] :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword intellectual_property: Intellectual Property details. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str :keyword account_name: [Required] Storage account name. Required. :paramtype account_name: str :keyword endpoint: Azure cloud endpoint for the storage account. @@ -2807,13 +3616,129 @@ def __init__( :paramtype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ - super().__init__(description=description, properties=properties, tags=tags, credentials=credentials, **kwargs) + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials self.datastore_type: str = "AzureFile" + self.intellectual_property = intellectual_property + self.is_default = None self.account_name = account_name self.endpoint = endpoint self.file_share_name = file_share_name self.protocol = protocol self.service_data_access_auth_identity = service_data_access_auth_identity + self.resource_group = resource_group + self.subscription_id = subscription_id + + +class InferencingServer(_serialization.Model): + """InferencingServer. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + AzureMLBatchInferencingServer, AzureMLOnlineInferencingServer, CustomInferencingServer, + TritonInferencingServer + + All required parameters must be populated in order to send to Azure. + + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". + :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType + """ + + _validation = { + "server_type": {"required": True}, + } + + _attribute_map = { + "server_type": {"key": "serverType", "type": "str"}, + } + + _subtype_map = { + "server_type": { + "AzureMLBatch": "AzureMLBatchInferencingServer", + "AzureMLOnline": "AzureMLOnlineInferencingServer", + "Custom": "CustomInferencingServer", + "Triton": "TritonInferencingServer", + } + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.server_type: Optional[str] = None + + +class AzureMLBatchInferencingServer(InferencingServer): + """Azure ML batch inferencing server configurations. + + All required parameters must be populated in order to send to Azure. + + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". + :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType + :ivar code_configuration: Code configuration for AML batch inferencing server. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + """ + + _validation = { + "server_type": {"required": True}, + } + + _attribute_map = { + "server_type": {"key": "serverType", "type": "str"}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + } + + def __init__(self, *, code_configuration: Optional["_models.CodeConfiguration"] = None, **kwargs: Any) -> None: + """ + :keyword code_configuration: Code configuration for AML batch inferencing server. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + """ + super().__init__(**kwargs) + self.server_type: str = "AzureMLBatch" + self.code_configuration = code_configuration + + +class AzureMLOnlineInferencingServer(InferencingServer): + """Azure ML online inferencing configurations. + + All required parameters must be populated in order to send to Azure. + + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". + :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType + :ivar code_configuration: Code configuration for AML inferencing server. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + """ + + _validation = { + "server_type": {"required": True}, + } + + _attribute_map = { + "server_type": {"key": "serverType", "type": "str"}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + } + + def __init__(self, *, code_configuration: Optional["_models.CodeConfiguration"] = None, **kwargs: Any) -> None: + """ + :keyword code_configuration: Code configuration for AML inferencing server. 
+ :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + """ + super().__init__(**kwargs) + self.server_type: str = "AzureMLOnline" + self.code_configuration = code_configuration class EarlyTerminationPolicy(_serialization.Model): @@ -2922,6 +3847,69 @@ def __init__( self.slack_factor = slack_factor +class BaseEnvironmentSource(_serialization.Model): + """BaseEnvironmentSource. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BaseEnvironmentId + + All required parameters must be populated in order to send to Azure. + + :ivar base_environment_source_type: [Required] Base environment type. Required. + "EnvironmentAsset" + :vartype base_environment_source_type: str or + ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSourceType + """ + + _validation = { + "base_environment_source_type": {"required": True}, + } + + _attribute_map = { + "base_environment_source_type": {"key": "baseEnvironmentSourceType", "type": "str"}, + } + + _subtype_map = {"base_environment_source_type": {"EnvironmentAsset": "BaseEnvironmentId"}} + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.base_environment_source_type: Optional[str] = None + + +class BaseEnvironmentId(BaseEnvironmentSource): + """Base environment type. + + All required parameters must be populated in order to send to Azure. + + :ivar base_environment_source_type: [Required] Base environment type. Required. + "EnvironmentAsset" + :vartype base_environment_source_type: str or + ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSourceType + :ivar resource_id: [Required] Resource id accepting ArmId or AzureMlId. Required. + :vartype resource_id: str + """ + + _validation = { + "base_environment_source_type": {"required": True}, + "resource_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "base_environment_source_type": {"key": "baseEnvironmentSourceType", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, + } + + def __init__(self, *, resource_id: str, **kwargs: Any) -> None: + """ + :keyword resource_id: [Required] Resource id accepting ArmId or AzureMlId. Required. + :paramtype resource_id: str + """ + super().__init__(**kwargs) + self.base_environment_source_type: str = "EnvironmentAsset" + self.resource_id = resource_id + + class Resource(_serialization.Model): """Common fields that are returned in the response for all Azure Resource Manager resources. @@ -3105,6 +4093,38 @@ def __init__( self.sku = sku +class BatchDeploymentConfiguration(_serialization.Model): + """Properties relevant to different deployment types. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BatchPipelineComponentDeploymentConfiguration + + All required parameters must be populated in order to send to Azure. + + :ivar deployment_configuration_type: [Required] The type of the deployment. Required. Known + values are: "Model" and "PipelineComponent". 
+ :vartype deployment_configuration_type: str or + ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfigurationType + """ + + _validation = { + "deployment_configuration_type": {"required": True}, + } + + _attribute_map = { + "deployment_configuration_type": {"key": "deploymentConfigurationType", "type": "str"}, + } + + _subtype_map = { + "deployment_configuration_type": {"PipelineComponent": "BatchPipelineComponentDeploymentConfiguration"} + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.deployment_configuration_type: Optional[str] = None + + class EndpointDeploymentPropertiesBase(_serialization.Model): """Base definition for endpoint deployment. @@ -3112,8 +4132,8 @@ class EndpointDeploymentPropertiesBase(_serialization.Model): :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration :ivar description: Description of the endpoint deployment. :vartype description: str - :ivar environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. :vartype environment_id: str :ivar environment_variables: Environment variables configuration for the deployment. :vartype environment_variables: dict[str, str] @@ -3144,8 +4164,8 @@ def __init__( :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration :keyword description: Description of the endpoint deployment. :paramtype description: str - :keyword environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. :paramtype environment_id: str :keyword environment_variables: Environment variables configuration for the deployment. :paramtype environment_variables: dict[str, str] @@ -3169,8 +4189,8 @@ class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): # pylint: di :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration :ivar description: Description of the endpoint deployment. :vartype description: str - :ivar environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. :vartype environment_id: str :ivar environment_variables: Environment variables configuration for the deployment. :vartype environment_variables: dict[str, str] @@ -3178,6 +4198,9 @@ class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): # pylint: di :vartype properties: dict[str, str] :ivar compute: Compute target for batch inference operation. :vartype compute: str + :ivar deployment_configuration: Properties relevant to different deployment types. + :vartype deployment_configuration: + ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfiguration :ivar error_threshold: Error threshold, if the error count for the entire input goes above this value, the batch inference will be aborted. Range is [-1, int.MaxValue]. 
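(Editor's note, not part of the diff: BatchDeploymentProperties now carries an optional polymorphic deployment_configuration, and the only subtype wired into the map above is BatchPipelineComponentDeploymentConfiguration, defined in a later hunk of this diff. A minimal wiring sketch; the IdAssetReference asset_id parameter is assumed from the generated models and the component ARM id is a placeholder.)

from azure.mgmt.machinelearningservices.models import (
    BatchDeploymentProperties,
    BatchPipelineComponentDeploymentConfiguration,
    IdAssetReference,
)

pipeline_config = BatchPipelineComponentDeploymentConfiguration(
    component_id=IdAssetReference(
        asset_id="<arm-id-of-registered-pipeline-component>"  # placeholder
    ),
    description="Scores the nightly batch via a registered pipeline component",
    settings={"default_compute": "cpu-cluster"},
)

deployment = BatchDeploymentProperties(
    compute="cpu-cluster",
    deployment_configuration=pipeline_config,  # discriminator "PipelineComponent" set by the subclass
    error_threshold=-1,                         # abort threshold disabled, per the documented default
)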
@@ -3224,6 +4247,7 @@ class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): # pylint: di "environment_variables": {"key": "environmentVariables", "type": "{str}"}, "properties": {"key": "properties", "type": "{str}"}, "compute": {"key": "compute", "type": "str"}, + "deployment_configuration": {"key": "deploymentConfiguration", "type": "BatchDeploymentConfiguration"}, "error_threshold": {"key": "errorThreshold", "type": "int"}, "logging_level": {"key": "loggingLevel", "type": "str"}, "max_concurrency_per_instance": {"key": "maxConcurrencyPerInstance", "type": "int"}, @@ -3245,6 +4269,7 @@ def __init__( environment_variables: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, compute: Optional[str] = None, + deployment_configuration: Optional["_models.BatchDeploymentConfiguration"] = None, error_threshold: int = -1, logging_level: Optional[Union[str, "_models.BatchLoggingLevel"]] = None, max_concurrency_per_instance: int = 1, @@ -3261,8 +4286,8 @@ def __init__( :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration :keyword description: Description of the endpoint deployment. :paramtype description: str - :keyword environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. :paramtype environment_id: str :keyword environment_variables: Environment variables configuration for the deployment. :paramtype environment_variables: dict[str, str] @@ -3270,6 +4295,9 @@ def __init__( :paramtype properties: dict[str, str] :keyword compute: Compute target for batch inference operation. :paramtype compute: str + :keyword deployment_configuration: Properties relevant to different deployment types. + :paramtype deployment_configuration: + ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfiguration :keyword error_threshold: Error threshold, if the error count for the entire input goes above this value, the batch inference will be aborted. Range is [-1, int.MaxValue]. @@ -3310,6 +4338,7 @@ def __init__( **kwargs ) self.compute = compute + self.deployment_configuration = deployment_configuration self.error_threshold = error_threshold self.logging_level = logging_level self.max_concurrency_per_instance = max_concurrency_per_instance @@ -3481,7 +4510,7 @@ class EndpointPropertiesBase(_serialization.Model): :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar scoring_uri: Endpoint URI. + :ivar scoring_uri: Endpoint URI for the inference endpoint. :vartype scoring_uri: str :ivar swagger_uri: Endpoint Swagger URI. :vartype swagger_uri: str @@ -3553,7 +4582,7 @@ class BatchEndpointProperties(EndpointPropertiesBase): :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar scoring_uri: Endpoint URI. + :ivar scoring_uri: Endpoint URI for the inference endpoint. :vartype scoring_uri: str :ivar swagger_uri: Endpoint Swagger URI. :vartype swagger_uri: str @@ -3644,6 +4673,64 @@ def __init__( self.value = value +class BatchPipelineComponentDeploymentConfiguration(BatchDeploymentConfiguration): + """Properties for a Batch Pipeline Component Deployment. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar deployment_configuration_type: [Required] The type of the deployment. Required. Known + values are: "Model" and "PipelineComponent". + :vartype deployment_configuration_type: str or + ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfigurationType + :ivar component_id: The ARM id of the component to be run. + :vartype component_id: ~azure.mgmt.machinelearningservices.models.IdAssetReference + :ivar description: The description which will be applied to the job. + :vartype description: str + :ivar settings: Run-time settings for the pipeline job. + :vartype settings: dict[str, str] + :ivar tags: The tags which will be applied to the job. + :vartype tags: dict[str, str] + """ + + _validation = { + "deployment_configuration_type": {"required": True}, + } + + _attribute_map = { + "deployment_configuration_type": {"key": "deploymentConfigurationType", "type": "str"}, + "component_id": {"key": "componentId", "type": "IdAssetReference"}, + "description": {"key": "description", "type": "str"}, + "settings": {"key": "settings", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + } + + def __init__( + self, + *, + component_id: Optional["_models.IdAssetReference"] = None, + description: Optional[str] = None, + settings: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword component_id: The ARM id of the component to be run. + :paramtype component_id: ~azure.mgmt.machinelearningservices.models.IdAssetReference + :keyword description: The description which will be applied to the job. + :paramtype description: str + :keyword settings: Run-time settings for the pipeline job. + :paramtype settings: dict[str, str] + :keyword tags: The tags which will be applied to the job. + :paramtype tags: dict[str, str] + """ + super().__init__(**kwargs) + self.deployment_configuration_type: str = "PipelineComponent" + self.component_id = component_id + self.description = description + self.settings = settings + self.tags = tags + + class BatchRetrySettings(_serialization.Model): """Retry settings for a batch inference operation. @@ -3735,7 +4822,7 @@ def __init__(self, **kwargs: Any) -> None: class BindOptions(_serialization.Model): - """Describes the bind options for the container. + """BindOptions. :ivar propagation: Type of Bind Option. :vartype propagation: str @@ -3872,30 +4959,305 @@ def __init__(self, *, context_uri: str, dockerfile_path: str = "Dockerfile", **k self.dockerfile_path = dockerfile_path -class CertificateDatastoreCredentials(DatastoreCredentials): - """Certificate datastore credentials configuration. +class DataDriftMetricThresholdBase(_serialization.Model): + """DataDriftMetricThresholdBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CategoricalDataDriftMetricThreshold, NumericalDataDriftMetricThreshold All required parameters must be populated in order to send to Azure. - :ivar credentials_type: [Required] Credential type used to authentication with storage. - Required. Known values are: "AccountKey", "Certificate", "None", "Sas", and "ServicePrincipal". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar authority_url: Authority URL used for authentication. - :vartype authority_url: str - :ivar client_id: [Required] Service principal client ID. Required. 
- :vartype client_id: str - :ivar resource_url: Resource the service principal has access to. - :vartype resource_url: str - :ivar secrets: [Required] Service principal secrets. Required. - :vartype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets - :ivar tenant_id: [Required] ID of the tenant to which the service principal belongs. Required. - :vartype tenant_id: str - :ivar thumbprint: [Required] Thumbprint of the certificate used for authentication. Required. - :vartype thumbprint: str + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ _validation = { - "credentials_type": {"required": True}, + "data_type": {"required": True}, + } + + _attribute_map = { + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + } + + _subtype_map = { + "data_type": { + "Categorical": "CategoricalDataDriftMetricThreshold", + "Numerical": "NumericalDataDriftMetricThreshold", + } + } + + def __init__(self, *, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any) -> None: + """ + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + super().__init__(**kwargs) + self.data_type: Optional[str] = None + self.threshold = threshold + + +class CategoricalDataDriftMetricThreshold(DataDriftMetricThresholdBase): + """CategoricalDataDriftMetricThreshold. + + All required parameters must be populated in order to send to Azure. + + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :ivar metric: [Required] The categorical data drift metric to calculate. Required. Known values + are: "JensenShannonDistance", "PopulationStabilityIndex", and "PearsonsChiSquaredTest". + :vartype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataDriftMetric + """ + + _validation = { + "data_type": {"required": True}, + "metric": {"required": True}, + } + + _attribute_map = { + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, + } + + def __init__( + self, + *, + metric: Union[str, "_models.CategoricalDataDriftMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: + """ + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :keyword metric: [Required] The categorical data drift metric to calculate. Required. 
Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", and "PearsonsChiSquaredTest". + :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataDriftMetric + """ + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Categorical" + self.metric = metric + + +class DataQualityMetricThresholdBase(_serialization.Model): + """DataQualityMetricThresholdBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CategoricalDataQualityMetricThreshold, NumericalDataQualityMetricThreshold + + All required parameters must be populated in order to send to Azure. + + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + + _validation = { + "data_type": {"required": True}, + } + + _attribute_map = { + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + } + + _subtype_map = { + "data_type": { + "Categorical": "CategoricalDataQualityMetricThreshold", + "Numerical": "NumericalDataQualityMetricThreshold", + } + } + + def __init__(self, *, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any) -> None: + """ + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + super().__init__(**kwargs) + self.data_type: Optional[str] = None + self.threshold = threshold + + +class CategoricalDataQualityMetricThreshold(DataQualityMetricThresholdBase): + """CategoricalDataQualityMetricThreshold. + + All required parameters must be populated in order to send to Azure. + + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :ivar metric: [Required] The categorical data quality metric to calculate. Required. Known + values are: "NullValueRate", "DataTypeErrorRate", and "OutOfBoundsRate". + :vartype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataQualityMetric + """ + + _validation = { + "data_type": {"required": True}, + "metric": {"required": True}, + } + + _attribute_map = { + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, + } + + def __init__( + self, + *, + metric: Union[str, "_models.CategoricalDataQualityMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: + """ + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. 
+ :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :keyword metric: [Required] The categorical data quality metric to calculate. Required. Known + values are: "NullValueRate", "DataTypeErrorRate", and "OutOfBoundsRate". + :paramtype metric: str or + ~azure.mgmt.machinelearningservices.models.CategoricalDataQualityMetric + """ + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Categorical" + self.metric = metric + + +class PredictionDriftMetricThresholdBase(_serialization.Model): + """PredictionDriftMetricThresholdBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CategoricalPredictionDriftMetricThreshold, NumericalPredictionDriftMetricThreshold + + All required parameters must be populated in order to send to Azure. + + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + + _validation = { + "data_type": {"required": True}, + } + + _attribute_map = { + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + } + + _subtype_map = { + "data_type": { + "Categorical": "CategoricalPredictionDriftMetricThreshold", + "Numerical": "NumericalPredictionDriftMetricThreshold", + } + } + + def __init__(self, *, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any) -> None: + """ + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + super().__init__(**kwargs) + self.data_type: Optional[str] = None + self.threshold = threshold + + +class CategoricalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBase): + """CategoricalPredictionDriftMetricThreshold. + + All required parameters must be populated in order to send to Azure. + + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :ivar metric: [Required] The categorical prediction drift metric to calculate. Required. Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", and "PearsonsChiSquaredTest". + :vartype metric: str or + ~azure.mgmt.machinelearningservices.models.CategoricalPredictionDriftMetric + """ + + _validation = { + "data_type": {"required": True}, + "metric": {"required": True}, + } + + _attribute_map = { + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, + } + + def __init__( + self, + *, + metric: Union[str, "_models.CategoricalPredictionDriftMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: + """ + :keyword threshold: The threshold value. 
If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :keyword metric: [Required] The categorical prediction drift metric to calculate. Required. + Known values are: "JensenShannonDistance", "PopulationStabilityIndex", and + "PearsonsChiSquaredTest". + :paramtype metric: str or + ~azure.mgmt.machinelearningservices.models.CategoricalPredictionDriftMetric + """ + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Categorical" + self.metric = metric + + +class CertificateDatastoreCredentials(DatastoreCredentials): + """Certificate datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar authority_url: Authority URL used for authentication. + :vartype authority_url: str + :ivar client_id: [Required] Service principal client ID. Required. + :vartype client_id: str + :ivar resource_url: Resource the service principal has access to. + :vartype resource_url: str + :ivar secrets: [Required] Service principal secrets. Required. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets + :ivar tenant_id: [Required] ID of the tenant to which the service principal belongs. Required. + :vartype tenant_id: str + :ivar thumbprint: [Required] Thumbprint of the certificate used for authentication. Required. + :vartype thumbprint: str + """ + + _validation = { + "credentials_type": {"required": True}, "client_id": {"required": True}, "secrets": {"required": True}, "tenant_id": {"required": True}, @@ -3955,7 +5317,8 @@ class CertificateDatastoreSecrets(DatastoreSecrets): All required parameters must be populated in order to send to Azure. :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. - Known values are: "AccountKey", "Certificate", "Sas", and "ServicePrincipal". + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType :ivar certificate: Service principal certificate. :vartype certificate: str @@ -3980,7 +5343,7 @@ def __init__(self, *, certificate: Optional[str] = None, **kwargs: Any) -> None: self.certificate = certificate -class TableVertical(_serialization.Model): +class TableVertical(_serialization.Model): # pylint: disable=too-many-instance-attributes """Abstract class for AutoML tasks that use table dataset as input - such as Classification/Regression/Forecasting. @@ -3989,11 +5352,19 @@ class TableVertical(_serialization.Model): :ivar featurization_settings: Featurization inputs needed for AutoML job. :vartype featurization_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters :ivar limit_settings: Execution constraints for AutoMLJob. 
:vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset when validation dataset is not provided. :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings :ivar test_data: Test data input. :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation @@ -4016,8 +5387,11 @@ class TableVertical(_serialization.Model): _attribute_map = { "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, "test_data": {"key": "testData", "type": "MLTableJobInput"}, "test_data_size": {"key": "testDataSize", "type": "float"}, "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, @@ -4030,8 +5404,11 @@ def __init__( *, cv_split_column_names: Optional[List[str]] = None, featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, test_data: Optional["_models.MLTableJobInput"] = None, test_data_size: Optional[float] = None, validation_data: Optional["_models.MLTableJobInput"] = None, @@ -4045,6 +5422,9 @@ def __init__( :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters :keyword limit_settings: Execution constraints for AutoMLJob. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings @@ -4052,6 +5432,12 @@ def __init__( dataset when validation dataset is not provided. :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. 
+ :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings :keyword test_data: Test data input. :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation @@ -4073,8 +5459,11 @@ def __init__( super().__init__(**kwargs) self.cv_split_column_names = cv_split_column_names self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings self.n_cross_validations = n_cross_validations + self.search_space = search_space + self.sweep_settings = sweep_settings self.test_data = test_data self.test_data_size = test_data_size self.validation_data = validation_data @@ -4105,11 +5494,19 @@ class Classification(TableVertical, AutoMLVertical): # pylint: disable=too-many :ivar featurization_settings: Featurization inputs needed for AutoML job. :vartype featurization_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters :ivar limit_settings: Execution constraints for AutoMLJob. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset when validation dataset is not provided. :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings :ivar test_data: Test data input. 
:vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation @@ -4150,8 +5547,11 @@ class Classification(TableVertical, AutoMLVertical): # pylint: disable=too-many "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, "test_data": {"key": "testData", "type": "MLTableJobInput"}, "test_data_size": {"key": "testDataSize", "type": "float"}, "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, @@ -4170,8 +5570,11 @@ def __init__( target_column_name: Optional[str] = None, cv_split_column_names: Optional[List[str]] = None, featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, test_data: Optional["_models.MLTableJobInput"] = None, test_data_size: Optional[float] = None, validation_data: Optional["_models.MLTableJobInput"] = None, @@ -4196,6 +5599,9 @@ def __init__( :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters :keyword limit_settings: Execution constraints for AutoMLJob. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings @@ -4203,6 +5609,12 @@ def __init__( dataset when validation dataset is not provided. :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings :keyword test_data: Test data input. 
:paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation @@ -4233,8 +5645,11 @@ def __init__( super().__init__( cv_split_column_names=cv_split_column_names, featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, limit_settings=limit_settings, n_cross_validations=n_cross_validations, + search_space=search_space, + sweep_settings=sweep_settings, test_data=test_data, test_data_size=test_data_size, validation_data=validation_data, @@ -4254,8 +5669,11 @@ def __init__( self.training_settings = training_settings self.cv_split_column_names = cv_split_column_names self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings self.n_cross_validations = n_cross_validations + self.search_space = search_space + self.sweep_settings = sweep_settings self.test_data = test_data self.test_data_size = test_data_size self.validation_data = validation_data @@ -4263,6 +5681,98 @@ def __init__( self.weight_column_name = weight_column_name +class ModelPerformanceMetricThresholdBase(_serialization.Model): + """ModelPerformanceMetricThresholdBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ClassificationModelPerformanceMetricThreshold, RegressionModelPerformanceMetricThreshold + + All required parameters must be populated in order to send to Azure. + + :ivar model_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Classification" and "Regression". + :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + + _validation = { + "model_type": {"required": True}, + } + + _attribute_map = { + "model_type": {"key": "modelType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + } + + _subtype_map = { + "model_type": { + "Classification": "ClassificationModelPerformanceMetricThreshold", + "Regression": "RegressionModelPerformanceMetricThreshold", + } + } + + def __init__(self, *, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any) -> None: + """ + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + super().__init__(**kwargs) + self.model_type: Optional[str] = None + self.threshold = threshold + + +class ClassificationModelPerformanceMetricThreshold(ModelPerformanceMetricThresholdBase): + """ClassificationModelPerformanceMetricThreshold. + + All required parameters must be populated in order to send to Azure. + + :ivar model_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Classification" and "Regression". + :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :ivar metric: [Required] The classification model performance to calculate. Required. 
Known + values are: "Accuracy", "Precision", and "Recall". + :vartype metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationModelPerformanceMetric + """ + + _validation = { + "model_type": {"required": True}, + "metric": {"required": True}, + } + + _attribute_map = { + "model_type": {"key": "modelType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, + } + + def __init__( + self, + *, + metric: Union[str, "_models.ClassificationModelPerformanceMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: + """ + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :keyword metric: [Required] The classification model performance to calculate. Required. Known + values are: "Accuracy", "Precision", and "Recall". + :paramtype metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationModelPerformanceMetric + """ + super().__init__(threshold=threshold, **kwargs) + self.model_type: str = "Classification" + self.metric = metric + + class TrainingSettings(_serialization.Model): """Training related configuration. @@ -4283,6 +5793,14 @@ class TrainingSettings(_serialization.Model): :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. :vartype stack_ensemble_settings: ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to + 'non-distributed' for now, however in the future may result in mixed mode or heuristics based + mode selection. Default is 'auto'. + If 'Distributed' then only distributed featurization is used and distributed algorithms are + chosen. + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". + :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode """ _attribute_map = { @@ -4293,6 +5811,7 @@ class TrainingSettings(_serialization.Model): "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, } def __init__( @@ -4305,6 +5824,7 @@ def __init__( enable_vote_ensemble: bool = True, ensemble_model_download_timeout: datetime.timedelta = "PT5M", stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + training_mode: Optional[Union[str, "_models.TrainingMode"]] = None, **kwargs: Any ) -> None: """ @@ -4325,6 +5845,14 @@ def __init__( :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. :paramtype stack_ensemble_settings: ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to + 'non-distributed' for now, however in the future may result in mixed mode or heuristics based + mode selection. Default is 'auto'. + If 'Distributed' then only distributed featurization is used and distributed algorithms are + chosen. + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". 
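# Editor's illustrative sketch, not part of the generated diff: the new
# ``training_mode`` field opts an AutoML tabular task into distributed training.
# Assumes these models are re-exported from azure.mgmt.machinelearningservices.models
# (the usual layout for this generated package); the algorithm name is only an example.
from azure.mgmt.machinelearningservices import models

training_settings = models.ClassificationTrainingSettings(
    enable_vote_ensemble=True,                 # default shown in the signature above
    training_mode="Distributed",               # "Auto", "Distributed", or "NonDistributed"
    allowed_training_algorithms=["LightGBM"],  # assumed valid ClassificationModels value
)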
+ :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode """ super().__init__(**kwargs) self.enable_dnn_training = enable_dnn_training @@ -4334,6 +5862,7 @@ def __init__( self.enable_vote_ensemble = enable_vote_ensemble self.ensemble_model_download_timeout = ensemble_model_download_timeout self.stack_ensemble_settings = stack_ensemble_settings + self.training_mode = training_mode class ClassificationTrainingSettings(TrainingSettings): @@ -4356,6 +5885,14 @@ class ClassificationTrainingSettings(TrainingSettings): :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. :vartype stack_ensemble_settings: ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to + 'non-distributed' for now, however in the future may result in mixed mode or heuristics based + mode selection. Default is 'auto'. + If 'Distributed' then only distributed featurization is used and distributed algorithms are + chosen. + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". + :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :ivar allowed_training_algorithms: Allowed models for classification task. :vartype allowed_training_algorithms: list[str or ~azure.mgmt.machinelearningservices.models.ClassificationModels] @@ -4372,6 +5909,7 @@ class ClassificationTrainingSettings(TrainingSettings): "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, } @@ -4386,6 +5924,7 @@ def __init__( enable_vote_ensemble: bool = True, ensemble_model_download_timeout: datetime.timedelta = "PT5M", stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + training_mode: Optional[Union[str, "_models.TrainingMode"]] = None, allowed_training_algorithms: Optional[List[Union[str, "_models.ClassificationModels"]]] = None, blocked_training_algorithms: Optional[List[Union[str, "_models.ClassificationModels"]]] = None, **kwargs: Any @@ -4408,6 +5947,14 @@ def __init__( :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. :paramtype stack_ensemble_settings: ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to + 'non-distributed' for now, however in the future may result in mixed mode or heuristics based + mode selection. Default is 'auto'. + If 'Distributed' then only distributed featurization is used and distributed algorithms are + chosen. + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". + :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :keyword allowed_training_algorithms: Allowed models for classification task. 
:paramtype allowed_training_algorithms: list[str or ~azure.mgmt.machinelearningservices.models.ClassificationModels] @@ -4423,6 +5970,7 @@ def __init__( enable_vote_ensemble=enable_vote_ensemble, ensemble_model_download_timeout=ensemble_model_download_timeout, stack_ensemble_settings=stack_ensemble_settings, + training_mode=training_mode, **kwargs ) self.allowed_training_algorithms = allowed_training_algorithms @@ -4449,56 +5997,161 @@ def __init__(self, *, properties: Optional["_models.ScaleSettingsInformation"] = self.properties = properties -class CodeConfiguration(_serialization.Model): - """Configuration for a scoring code asset. +class ExportSummary(_serialization.Model): + """ExportSummary. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CsvExportSummary, CocoExportSummary, DatasetExportSummary + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar code_id: ARM resource ID of the code asset. - :vartype code_id: str - :ivar scoring_script: [Required] The script to execute on startup. eg. "score.py". Required. - :vartype scoring_script: str + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime """ _validation = { - "scoring_script": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, } _attribute_map = { - "code_id": {"key": "codeId", "type": "str"}, - "scoring_script": {"key": "scoringScript", "type": "str"}, + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, } - def __init__(self, *, scoring_script: str, code_id: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword code_id: ARM resource ID of the code asset. - :paramtype code_id: str - :keyword scoring_script: [Required] The script to execute on startup. eg. "score.py". Required. - :paramtype scoring_script: str - """ + _subtype_map = { + "format": {"CSV": "CsvExportSummary", "Coco": "CocoExportSummary", "Dataset": "DatasetExportSummary"} + } + + def __init__(self, **kwargs: Any) -> None: + """ """ super().__init__(**kwargs) - self.code_id = code_id - self.scoring_script = scoring_script + self.end_date_time = None + self.exported_row_count = None + self.format: Optional[str] = None + self.labeling_job_id = None + self.start_date_time = None -class CodeContainer(Resource): - """Azure Resource Manager resource envelope. +class CocoExportSummary(ExportSummary): + """CocoExportSummary. 
Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + :ivar container_name: The container name to which the labels will be exported. + :vartype container_name: str + :ivar snapshot_path: The output path where the labels will be exported. + :vartype snapshot_path: str + """ + + _validation = { + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "container_name": {"readonly": True}, + "snapshot_path": {"readonly": True}, + } + + _attribute_map = { + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "container_name": {"key": "containerName", "type": "str"}, + "snapshot_path": {"key": "snapshotPath", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.format: str = "Coco" + self.container_name = None + self.snapshot_path = None + + +class CodeConfiguration(_serialization.Model): + """Configuration for a scoring code asset. + + All required parameters must be populated in order to send to Azure. + + :ivar code_id: ARM resource ID of the code asset. + :vartype code_id: str + :ivar scoring_script: [Required] The script to execute on startup. eg. "score.py". Required. + :vartype scoring_script: str + """ + + _validation = { + "scoring_script": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "code_id": {"key": "codeId", "type": "str"}, + "scoring_script": {"key": "scoringScript", "type": "str"}, + } + + def __init__(self, *, scoring_script: str, code_id: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword code_id: ARM resource ID of the code asset. + :paramtype code_id: str + :keyword scoring_script: [Required] The script to execute on startup. eg. "score.py". Required. 
+ :paramtype scoring_script: str + """ + super().__init__(**kwargs) + self.code_id = code_id + self.scoring_script = scoring_script + + +class CodeContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties """ @@ -4678,9 +6331,13 @@ class CodeVersionProperties(AssetBase): :vartype properties: dict[str, str] :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar is_anonymous: If the name version are system generated (anonymous registration). + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived?. + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :vartype is_archived: bool :ivar code_uri: Uri where code is located. :vartype code_uri: str @@ -4698,6 +6355,7 @@ class CodeVersionProperties(AssetBase): "description": {"key": "description", "type": "str"}, "properties": {"key": "properties", "type": "{str}"}, "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "is_anonymous": {"key": "isAnonymous", "type": "bool"}, "is_archived": {"key": "isArchived", "type": "bool"}, "code_uri": {"key": "codeUri", "type": "str"}, @@ -4710,6 +6368,7 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, is_anonymous: bool = False, is_archived: bool = False, code_uri: Optional[str] = None, @@ -4722,9 +6381,13 @@ def __init__( :paramtype properties: dict[str, str] :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword is_anonymous: If the name version are system generated (anonymous registration). + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. 
:paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived?. + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :paramtype is_archived: bool :keyword code_uri: Uri where code is located. :paramtype code_uri: str @@ -4733,6 +6396,7 @@ def __init__( description=description, properties=properties, tags=tags, + auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs @@ -4771,6 +6435,62 @@ def __init__( self.value = value +class Collection(_serialization.Model): + """Collection. + + :ivar client_id: The msi client id used to collect logging to blob storage. If it's + null,backend will pick a registered endpoint identity to auth. + :vartype client_id: str + :ivar data_collection_mode: Enable or disable data collection. Known values are: "Enabled" and + "Disabled". + :vartype data_collection_mode: str or + ~azure.mgmt.machinelearningservices.models.DataCollectionMode + :ivar data_id: The data asset arm resource id. Client side will ensure data asset is pointing + to the blob storage, and backend will collect data to the blob storage. + :vartype data_id: str + :ivar sampling_rate: The sampling rate for collection. Sampling rate 1.0 means we collect 100% + of data by default. + :vartype sampling_rate: float + """ + + _attribute_map = { + "client_id": {"key": "clientId", "type": "str"}, + "data_collection_mode": {"key": "dataCollectionMode", "type": "str"}, + "data_id": {"key": "dataId", "type": "str"}, + "sampling_rate": {"key": "samplingRate", "type": "float"}, + } + + def __init__( + self, + *, + client_id: Optional[str] = None, + data_collection_mode: Optional[Union[str, "_models.DataCollectionMode"]] = None, + data_id: Optional[str] = None, + sampling_rate: float = 1, + **kwargs: Any + ) -> None: + """ + :keyword client_id: The msi client id used to collect logging to blob storage. If it's + null,backend will pick a registered endpoint identity to auth. + :paramtype client_id: str + :keyword data_collection_mode: Enable or disable data collection. Known values are: "Enabled" + and "Disabled". + :paramtype data_collection_mode: str or + ~azure.mgmt.machinelearningservices.models.DataCollectionMode + :keyword data_id: The data asset arm resource id. Client side will ensure data asset is + pointing to the blob storage, and backend will collect data to the blob storage. + :paramtype data_id: str + :keyword sampling_rate: The sampling rate for collection. Sampling rate 1.0 means we collect + 100% of data by default. + :paramtype sampling_rate: float + """ + super().__init__(**kwargs) + self.client_id = client_id + self.data_collection_mode = data_collection_mode + self.data_id = data_id + self.sampling_rate = sampling_rate + + class ColumnTransformer(_serialization.Model): """Column transformer parameters. @@ -4828,22 +6548,30 @@ class CommandJob(JobBaseProperties): # pylint: disable=too-many-instance-attrib :ivar is_archived: Is the asset archived?. :vartype is_archived: bool :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", - "Command", "Sweep", and "Pipeline". + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar notification_setting: Notification setting for the job. 
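# Editor's illustrative sketch, not part of the generated diff: the new Collection
# model configures data collection (for example, request/response logging) with a
# sampling rate. The data asset ID below is a placeholder; the arguments match the
# Collection.__init__ signature shown above.
from azure.mgmt.machinelearningservices import models

inputs_collection = models.Collection(
    data_collection_mode="Enabled",   # or "Disabled"
    data_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.MachineLearningServices/workspaces/<ws>/data/model-inputs/versions/1",  # placeholder ARM ID
    sampling_rate=0.5,                # collect 50% of traffic; the default is 1.0 (100%)
)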
+ :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :ivar secrets_configuration: Configuration for secrets to be made available during runtime. + :vartype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", - "Canceled", "NotResponding", "Paused", and "Unknown". + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar autologger_settings: Autologger settings for the job. + :vartype autologger_settings: ~azure.mgmt.machinelearningservices.models.AutologgerSettings :ivar code_id: ARM resource ID of the code asset. :vartype code_id: str :ivar command: [Required] The command to execute on startup of the job. eg. "python train.py". Required. :vartype command: str :ivar distribution: Distribution configuration of the job. If set, this should be one of Mpi, - Tensorflow, PyTorch, or null. + Tensorflow, PyTorch, Ray, or null. :vartype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration :ivar environment_id: [Required] The ARM resource ID of the Environment specification for the job. Required. @@ -4858,6 +6586,8 @@ class CommandJob(JobBaseProperties): # pylint: disable=too-many-instance-attrib :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] :ivar parameters: Input parameters. :vartype parameters: JSON + :ivar queue_settings: Queue settings for the job. + :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings :ivar resources: Compute Resource configuration for the job.
:vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration """ _validation = { @@ -4881,8 +6611,11 @@ class CommandJob(JobBaseProperties): # pylint: disable=too-many-instance-attrib "identity": {"key": "identity", "type": "IdentityConfiguration"}, "is_archived": {"key": "isArchived", "type": "bool"}, "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, "services": {"key": "services", "type": "{JobService}"}, "status": {"key": "status", "type": "str"}, + "autologger_settings": {"key": "autologgerSettings", "type": "AutologgerSettings"}, "code_id": {"key": "codeId", "type": "str"}, "command": {"key": "command", "type": "str"}, "distribution": {"key": "distribution", "type": "DistributionConfiguration"}, @@ -4892,10 +6625,11 @@ class CommandJob(JobBaseProperties): # pylint: disable=too-many-instance-attrib "limits": {"key": "limits", "type": "CommandJobLimits"}, "outputs": {"key": "outputs", "type": "{JobOutput}"}, "parameters": {"key": "parameters", "type": "object"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, "resources": {"key": "resources", "type": "JobResourceConfiguration"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, command: str, @@ -4909,13 +6643,17 @@ def __init__( experiment_name: str = "Default", identity: Optional["_models.IdentityConfiguration"] = None, is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, services: Optional[Dict[str, "_models.JobService"]] = None, + autologger_settings: Optional["_models.AutologgerSettings"] = None, code_id: Optional[str] = None, distribution: Optional["_models.DistributionConfiguration"] = None, environment_variables: Optional[Dict[str, str]] = None, inputs: Optional[Dict[str, "_models.JobInput"]] = None, limits: Optional["_models.CommandJobLimits"] = None, outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + queue_settings: Optional["_models.QueueSettings"] = None, resources: Optional["_models.JobResourceConfiguration"] = None, **kwargs: Any ) -> None: @@ -4941,16 +6679,24 @@ def __init__( :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool + :keyword notification_setting: Notification setting for the job. + :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :keyword secrets_configuration: Configuration for secrets to be made available during runtime. + :paramtype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :keyword services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword autologger_settings: Autologger settings for the job. + :paramtype autologger_settings: ~azure.mgmt.machinelearningservices.models.AutologgerSettings :keyword code_id: ARM resource ID of the code asset. :paramtype code_id: str :keyword command: [Required] The command to execute on startup of the job. eg. "python train.py". Required.
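# Editor's illustrative sketch, not part of the generated diff: constructing a
# CommandJob with the fields the docstring above marks as required. The environment ID
# is a placeholder, and environment_id is assumed to be a keyword argument since it is
# documented as [Required]. The new optional preview fields (queue_settings,
# notification_setting, secrets_configuration, autologger_settings) accept the
# corresponding models from the same namespace and are omitted here to avoid assuming
# their constructors.
from azure.mgmt.machinelearningservices import models

job_properties = models.CommandJob(
    command="python train.py",
    environment_id="/subscriptions/<sub>/.../environments/my-env/versions/1",  # placeholder ARM ID
    experiment_name="Default",
    environment_variables={"MY_FLAG": "1"},
)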
:paramtype command: str :keyword distribution: Distribution configuration of the job. If set, this should be one of - Mpi, Tensorflow, PyTorch, or null. + Mpi, Tensorflow, PyTorch, Ray, or null. :paramtype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration :keyword environment_id: [Required] The ARM resource ID of the Environment specification for the job. Required. @@ -4963,6 +6709,8 @@ def __init__( :paramtype limits: ~azure.mgmt.machinelearningservices.models.CommandJobLimits :keyword outputs: Mapping of output data bindings used in the job. :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword queue_settings: Queue settings for the job. + :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings :keyword resources: Compute Resource configuration for the job. :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration """ @@ -4976,10 +6724,13 @@ def __init__( experiment_name=experiment_name, identity=identity, is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, services=services, **kwargs ) self.job_type: str = "Command" + self.autologger_settings = autologger_settings self.code_id = code_id self.command = command self.distribution = distribution @@ -4989,6 +6740,7 @@ def __init__( self.limits = limits self.outputs = outputs self.parameters = None + self.queue_settings = queue_settings self.resources = resources @@ -5062,6 +6814,26 @@ def __init__(self, *, timeout: Optional[datetime.timedelta] = None, **kwargs: An self.job_limits_type: str = "Command" +class ComponentConfiguration(_serialization.Model): + """Used for sweep over component. + + :ivar pipeline_settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. + :vartype pipeline_settings: JSON + """ + + _attribute_map = { + "pipeline_settings": {"key": "pipelineSettings", "type": "object"}, + } + + def __init__(self, *, pipeline_settings: Optional[JSON] = None, **kwargs: Any) -> None: + """ + :keyword pipeline_settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. + :paramtype pipeline_settings: JSON + """ + super().__init__(**kwargs) + self.pipeline_settings = pipeline_settings + + class ComponentContainer(Resource): """Azure Resource Manager resource envelope. @@ -5270,9 +7042,13 @@ class ComponentVersionProperties(AssetBase): :vartype properties: dict[str, str] :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar is_anonymous: If the name version are system generated (anonymous registration). + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived?. + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :vartype is_archived: bool :ivar component_spec: Defines Component definition details. @@ -5287,6 +7063,8 @@ class ComponentVersionProperties(AssetBase): "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
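# Editor's illustrative sketch, not part of the generated diff: ComponentConfiguration
# carries free-form pipeline settings when sweeping over a component, per its docstring
# above ("for things like ContinueRunOnStepFailure etc.").
from azure.mgmt.machinelearningservices import models

component_configuration = models.ComponentConfiguration(
    pipeline_settings={"ContinueRunOnStepFailure": True},
)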
:vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + :ivar stage: Stage in the component lifecycle. + :vartype stage: str """ _validation = { @@ -5297,10 +7075,12 @@ class ComponentVersionProperties(AssetBase): "description": {"key": "description", "type": "str"}, "properties": {"key": "properties", "type": "{str}"}, "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "is_anonymous": {"key": "isAnonymous", "type": "bool"}, "is_archived": {"key": "isArchived", "type": "bool"}, "component_spec": {"key": "componentSpec", "type": "object"}, "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -5309,9 +7089,11 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, is_anonymous: bool = False, is_archived: bool = False, component_spec: Optional[JSON] = None, + stage: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -5321,9 +7103,13 @@ def __init__( :paramtype properties: dict[str, str] :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword is_anonymous: If the name version are system generated (anonymous registration). + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived?. + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :paramtype is_archived: bool :keyword component_spec: Defines Component definition details. @@ -5334,17 +7120,21 @@ def __init__( href="https://docs.microsoft.com/en-us/azure/machine-learning/reference-yaml-component-command" />. :paramtype component_spec: JSON + :keyword stage: Stage in the component lifecycle. + :paramtype stage: str """ super().__init__( description=description, properties=properties, tags=tags, + auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs ) self.component_spec = component_spec self.provisioning_state = None + self.stage = stage class ComponentVersionResourceArmPaginatedResult(_serialization.Model): @@ -5534,6 +7324,31 @@ def __init__( self.endpoint_uri = endpoint_uri +class ComputeInstanceAutologgerSettings(_serialization.Model): + """Specifies settings for autologger. + + :ivar mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. Known + values are: "Enabled" and "Disabled". + :vartype mlflow_autologger: str or ~azure.mgmt.machinelearningservices.models.MlflowAutologger + """ + + _attribute_map = { + "mlflow_autologger": {"key": "mlflowAutologger", "type": "str"}, + } + + def __init__( + self, *, mlflow_autologger: Optional[Union[str, "_models.MlflowAutologger"]] = None, **kwargs: Any + ) -> None: + """ + :keyword mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. Known + values are: "Enabled" and "Disabled". 
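# Editor's illustrative sketch, not part of the generated diff: enabling MLflow
# autologging for notebooks on a compute instance. The resulting object is meant to be
# assigned to ComputeInstanceProperties.autologger_settings, which this diff also adds.
from azure.mgmt.machinelearningservices import models

autologger = models.ComputeInstanceAutologgerSettings(mlflow_autologger="Enabled")  # or "Disabled"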
+ :paramtype mlflow_autologger: str or + ~azure.mgmt.machinelearningservices.models.MlflowAutologger + """ + super().__init__(**kwargs) + self.mlflow_autologger = mlflow_autologger + + class ComputeInstanceConnectivityEndpoints(_serialization.Model): """Defines all connectivity endpoints and properties for an ComputeInstance. @@ -5827,13 +7642,13 @@ class ComputeInstanceLastOperation(_serialization.Model): """The last operation on ComputeInstance. :ivar operation_name: Name of the last operation. Known values are: "Create", "Start", "Stop", - "Restart", "Reimage", and "Delete". + "Restart", "Resize", "Reimage", and "Delete". :vartype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName :ivar operation_time: Time of the last operation. :vartype operation_time: ~datetime.datetime :ivar operation_status: Operation status. Known values are: "InProgress", "Succeeded", - "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", and - "DeleteFailed". + "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ResizeFailed", "ReimageFailed", + and "DeleteFailed". :vartype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus :ivar operation_trigger: Trigger of operation. Known values are: "User", "Schedule", and "IdleShutdown". @@ -5858,13 +7673,13 @@ def __init__( ) -> None: """ :keyword operation_name: Name of the last operation. Known values are: "Create", "Start", - "Stop", "Restart", "Reimage", and "Delete". + "Stop", "Restart", "Resize", "Reimage", and "Delete". :paramtype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName :keyword operation_time: Time of the last operation. :paramtype operation_time: ~datetime.datetime :keyword operation_status: Operation status. Known values are: "InProgress", "Succeeded", - "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", and - "DeleteFailed". + "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ResizeFailed", "ReimageFailed", + and "DeleteFailed". :paramtype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus :keyword operation_trigger: Trigger of operation. Known values are: "User", "Schedule", and "IdleShutdown". @@ -5893,6 +7708,9 @@ class ComputeInstanceProperties(_serialization.Model): # pylint: disable=too-ma depending on his/her assigned role. Known values are: "Personal" and "Shared". :vartype application_sharing_policy: str or ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy + :ivar autologger_settings: Specifies settings for autologger. + :vartype autologger_settings: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceAutologgerSettings :ivar ssh_settings: Specifies policy and settings for SSH access. :vartype ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings :ivar custom_services: List of Custom Services added to the compute. @@ -5913,14 +7731,20 @@ class ComputeInstanceProperties(_serialization.Model): # pylint: disable=too-ma :ivar errors: Collection of errors encountered on this ComputeInstance. :vartype errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar state: The current state of this ComputeInstance. Known values are: "Creating", - "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed", - "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", and - "Unusable". 
+ "CreateFailed", "Deleting", "Running", "Restarting", "Resizing", "JobRunning", "SettingUp", + "SetupFailed", "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", + "Unknown", and "Unusable". :vartype state: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceState :ivar compute_instance_authorization_type: The Compute Instance Authorization type. Available values are personal (default). "personal" :vartype compute_instance_authorization_type: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType + :ivar enable_os_patching: Enable Auto OS Patching. Possible values are: true, false. + :vartype enable_os_patching: bool + :ivar release_quota_on_stop: Release quota if compute instance stopped. Possible values are: + true - release quota if compute instance stopped. false - don't release quota when compute + instance stopped. + :vartype release_quota_on_stop: bool :ivar personal_compute_instance_settings: Settings for a personal compute instance. :vartype personal_compute_instance_settings: ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings @@ -5931,6 +7755,9 @@ class ComputeInstanceProperties(_serialization.Model): # pylint: disable=too-ma ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation :ivar schedules: The list of schedules to be applied on the computes. :vartype schedules: ~azure.mgmt.machinelearningservices.models.ComputeSchedules + :ivar idle_time_before_shutdown: Stops compute instance after user defined period of + inactivity. Time is defined in ISO8601 format. Minimum is 15 min, maximum is 3 days. + :vartype idle_time_before_shutdown: str :ivar enable_node_public_ip: Enable or disable node public IP address provisioning. Possible values are: Possible values are: true - Indicates that the compute nodes will have public IPs provisioned. 
false - Indicates that the compute nodes will have a private endpoint and no @@ -5964,6 +7791,7 @@ class ComputeInstanceProperties(_serialization.Model): # pylint: disable=too-ma "vm_size": {"key": "vmSize", "type": "str"}, "subnet": {"key": "subnet", "type": "ResourceId"}, "application_sharing_policy": {"key": "applicationSharingPolicy", "type": "str"}, + "autologger_settings": {"key": "autologgerSettings", "type": "ComputeInstanceAutologgerSettings"}, "ssh_settings": {"key": "sshSettings", "type": "ComputeInstanceSshSettings"}, "custom_services": {"key": "customServices", "type": "[CustomService]"}, "os_image_metadata": {"key": "osImageMetadata", "type": "ImageMetadata"}, @@ -5973,6 +7801,8 @@ class ComputeInstanceProperties(_serialization.Model): # pylint: disable=too-ma "errors": {"key": "errors", "type": "[ErrorResponse]"}, "state": {"key": "state", "type": "str"}, "compute_instance_authorization_type": {"key": "computeInstanceAuthorizationType", "type": "str"}, + "enable_os_patching": {"key": "enableOSPatching", "type": "bool"}, + "release_quota_on_stop": {"key": "releaseQuotaOnStop", "type": "bool"}, "personal_compute_instance_settings": { "key": "personalComputeInstanceSettings", "type": "PersonalComputeInstanceSettings", @@ -5980,6 +7810,7 @@ class ComputeInstanceProperties(_serialization.Model): # pylint: disable=too-ma "setup_scripts": {"key": "setupScripts", "type": "SetupScripts"}, "last_operation": {"key": "lastOperation", "type": "ComputeInstanceLastOperation"}, "schedules": {"key": "schedules", "type": "ComputeSchedules"}, + "idle_time_before_shutdown": {"key": "idleTimeBeforeShutdown", "type": "str"}, "enable_node_public_ip": {"key": "enableNodePublicIp", "type": "bool"}, "containers": {"key": "containers", "type": "[ComputeInstanceContainer]"}, "data_disks": {"key": "dataDisks", "type": "[ComputeInstanceDataDisk]"}, @@ -5987,19 +7818,23 @@ class ComputeInstanceProperties(_serialization.Model): # pylint: disable=too-ma "versions": {"key": "versions", "type": "ComputeInstanceVersion"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, vm_size: Optional[str] = None, subnet: Optional["_models.ResourceId"] = None, application_sharing_policy: Union[str, "_models.ApplicationSharingPolicy"] = "Shared", + autologger_settings: Optional["_models.ComputeInstanceAutologgerSettings"] = None, ssh_settings: Optional["_models.ComputeInstanceSshSettings"] = None, custom_services: Optional[List["_models.CustomService"]] = None, compute_instance_authorization_type: Union[str, "_models.ComputeInstanceAuthorizationType"] = "personal", + enable_os_patching: bool = False, + release_quota_on_stop: bool = False, personal_compute_instance_settings: Optional["_models.PersonalComputeInstanceSettings"] = None, setup_scripts: Optional["_models.SetupScripts"] = None, schedules: Optional["_models.ComputeSchedules"] = None, - enable_node_public_ip: Optional[bool] = None, + idle_time_before_shutdown: Optional[str] = None, + enable_node_public_ip: bool = True, **kwargs: Any ) -> None: """ @@ -6013,6 +7848,9 @@ def __init__( depending on his/her assigned role. Known values are: "Personal" and "Shared". :paramtype application_sharing_policy: str or ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy + :keyword autologger_settings: Specifies settings for autologger. + :paramtype autologger_settings: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceAutologgerSettings :keyword ssh_settings: Specifies policy and settings for SSH access. 
:paramtype ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings :keyword custom_services: List of Custom Services added to the compute. @@ -6021,6 +7859,12 @@ def __init__( Available values are personal (default). "personal" :paramtype compute_instance_authorization_type: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType + :keyword enable_os_patching: Enable Auto OS Patching. Possible values are: true, false. + :paramtype enable_os_patching: bool + :keyword release_quota_on_stop: Release quota if compute instance stopped. Possible values are: + true - release quota if compute instance stopped. false - don't release quota when compute + instance stopped. + :paramtype release_quota_on_stop: bool :keyword personal_compute_instance_settings: Settings for a personal compute instance. :paramtype personal_compute_instance_settings: ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings @@ -6028,6 +7872,9 @@ def __init__( :paramtype setup_scripts: ~azure.mgmt.machinelearningservices.models.SetupScripts :keyword schedules: The list of schedules to be applied on the computes. :paramtype schedules: ~azure.mgmt.machinelearningservices.models.ComputeSchedules + :keyword idle_time_before_shutdown: Stops compute instance after user defined period of + inactivity. Time is defined in ISO8601 format. Minimum is 15 min, maximum is 3 days. + :paramtype idle_time_before_shutdown: str :keyword enable_node_public_ip: Enable or disable node public IP address provisioning. Possible values are: Possible values are: true - Indicates that the compute nodes will have public IPs provisioned. false - Indicates that the compute nodes will have a private endpoint and no @@ -6038,6 +7885,7 @@ def __init__( self.vm_size = vm_size self.subnet = subnet self.application_sharing_policy = application_sharing_policy + self.autologger_settings = autologger_settings self.ssh_settings = ssh_settings self.custom_services = custom_services self.os_image_metadata = None @@ -6047,10 +7895,13 @@ def __init__( self.errors = None self.state = None self.compute_instance_authorization_type = compute_instance_authorization_type + self.enable_os_patching = enable_os_patching + self.release_quota_on_stop = release_quota_on_stop self.personal_compute_instance_settings = personal_compute_instance_settings self.setup_scripts = setup_scripts self.last_operation = None self.schedules = schedules + self.idle_time_before_shutdown = idle_time_before_shutdown self.enable_node_public_ip = enable_node_public_ip self.containers = None self.data_disks = None @@ -6234,6 +8085,26 @@ def __init__( self.system_data = None +class ComputeRuntimeDto(_serialization.Model): + """ComputeRuntimeDto. + + :ivar spark_runtime_version: + :vartype spark_runtime_version: str + """ + + _attribute_map = { + "spark_runtime_version": {"key": "sparkRuntimeVersion", "type": "str"}, + } + + def __init__(self, *, spark_runtime_version: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword spark_runtime_version: + :paramtype spark_runtime_version: str + """ + super().__init__(**kwargs) + self.spark_runtime_version = spark_runtime_version + + class ComputeSchedules(_serialization.Model): """The list of schedules to be applied on the computes. @@ -6417,7 +8288,7 @@ def __init__( class CosmosDbSettings(_serialization.Model): """CosmosDbSettings. - :ivar collections_throughput: The throughput of the collections in cosmosdb database. 
+ :ivar collections_throughput: :vartype collections_throughput: int """ @@ -6427,13 +8298,81 @@ class CosmosDbSettings(_serialization.Model): def __init__(self, *, collections_throughput: Optional[int] = None, **kwargs: Any) -> None: """ - :keyword collections_throughput: The throughput of the collections in cosmosdb database. + :keyword collections_throughput: :paramtype collections_throughput: int """ super().__init__(**kwargs) self.collections_throughput = collections_throughput +class ScheduleActionBase(_serialization.Model): + """ScheduleActionBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + JobScheduleAction, CreateMonitorAction, ImportDataAction, EndpointScheduleAction + + All required parameters must be populated in order to send to Azure. + + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". + :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType + """ + + _validation = { + "action_type": {"required": True}, + } + + _attribute_map = { + "action_type": {"key": "actionType", "type": "str"}, + } + + _subtype_map = { + "action_type": { + "CreateJob": "JobScheduleAction", + "CreateMonitor": "CreateMonitorAction", + "ImportData": "ImportDataAction", + "InvokeBatchEndpoint": "EndpointScheduleAction", + } + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.action_type: Optional[str] = None + + +class CreateMonitorAction(ScheduleActionBase): + """CreateMonitorAction. + + All required parameters must be populated in order to send to Azure. + + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". + :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType + :ivar monitor_definition: [Required] Defines the monitor. Required. + :vartype monitor_definition: ~azure.mgmt.machinelearningservices.models.MonitorDefinition + """ + + _validation = { + "action_type": {"required": True}, + "monitor_definition": {"required": True}, + } + + _attribute_map = { + "action_type": {"key": "actionType", "type": "str"}, + "monitor_definition": {"key": "monitorDefinition", "type": "MonitorDefinition"}, + } + + def __init__(self, *, monitor_definition: "_models.MonitorDefinition", **kwargs: Any) -> None: + """ + :keyword monitor_definition: [Required] Defines the monitor. Required. + :paramtype monitor_definition: ~azure.mgmt.machinelearningservices.models.MonitorDefinition + """ + super().__init__(**kwargs) + self.action_type: str = "CreateMonitor" + self.monitor_definition = monitor_definition + + class Cron(_serialization.Model): """The workflow trigger cron for ComputeStartStop schedule type. @@ -6608,6 +8547,58 @@ def __init__( self.expression = expression +class CsvExportSummary(ExportSummary): + """CsvExportSummary. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. 
+ Known values are: "Dataset", "Coco", and "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + :ivar container_name: The container name to which the labels will be exported. + :vartype container_name: str + :ivar snapshot_path: The output path where the labels will be exported. + :vartype snapshot_path: str + """ + + _validation = { + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "container_name": {"readonly": True}, + "snapshot_path": {"readonly": True}, + } + + _attribute_map = { + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "container_name": {"key": "containerName", "type": "str"}, + "snapshot_path": {"key": "snapshotPath", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.format: str = "CSV" + self.container_name = None + self.snapshot_path = None + + class CustomForecastHorizon(ForecastHorizon): """The desired maximum forecast horizon in units of time-series frequency. @@ -6640,64 +8631,227 @@ def __init__(self, *, value: int, **kwargs: Any) -> None: self.value = value -class JobInput(_serialization.Model): - """Command job definition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - CustomModelJobInput, LiteralJobInput, MLFlowModelJobInput, MLTableJobInput, - TritonModelJobInput, UriFileJobInput, UriFolderJobInput +class CustomInferencingServer(InferencingServer): + """Custom inference server configurations. All required parameters must be populated in order to send to Azure. - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and - "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". + :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType + :ivar inference_configuration: Inference configuration for custom inferencing. 
+ :vartype inference_configuration: + ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration """ _validation = { - "job_input_type": {"required": True}, + "server_type": {"required": True}, } _attribute_map = { - "description": {"key": "description", "type": "str"}, - "job_input_type": {"key": "jobInputType", "type": "str"}, - } - - _subtype_map = { - "job_input_type": { - "custom_model": "CustomModelJobInput", - "literal": "LiteralJobInput", - "mlflow_model": "MLFlowModelJobInput", - "mltable": "MLTableJobInput", - "triton_model": "TritonModelJobInput", - "uri_file": "UriFileJobInput", - "uri_folder": "UriFolderJobInput", - } + "server_type": {"key": "serverType", "type": "str"}, + "inference_configuration": {"key": "inferenceConfiguration", "type": "OnlineInferenceConfiguration"}, } - def __init__(self, *, description: Optional[str] = None, **kwargs: Any) -> None: + def __init__( + self, *, inference_configuration: Optional["_models.OnlineInferenceConfiguration"] = None, **kwargs: Any + ) -> None: """ - :keyword description: Description for the input. - :paramtype description: str + :keyword inference_configuration: Inference configuration for custom inferencing. + :paramtype inference_configuration: + ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration """ super().__init__(**kwargs) - self.description = description - self.job_input_type: Optional[str] = None + self.server_type: str = "Custom" + self.inference_configuration = inference_configuration -class CustomModelJobInput(AssetJobInput, JobInput): - """CustomModelJobInput. +class CustomKeys(_serialization.Model): + """Custom Keys credential object. - All required parameters must be populated in order to send to Azure. + :ivar keys: Dictionary of :code:``. + :vartype keys: dict[str, str] + """ - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and - "triton_model". + _attribute_map = { + "keys": {"key": "keys", "type": "{str}"}, + } + + def __init__(self, *, keys: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: + """ + :keyword keys: Dictionary of :code:``. + :paramtype keys: dict[str, str] + """ + super().__init__(**kwargs) + self.keys = keys + + +class CustomKeysWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """Category:= CustomKeys + AuthType:= CustomKeys (as type discriminator) + Credentials:= {CustomKeys} as + Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.CustomKeys + Target:= {any value} + Use Metadata property bag for ApiVersion and other metadata fields. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". 
+ :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. + :vartype metadata: JSON + :ivar target: + :vartype target: str + :ivar credentials: Custom Keys credential object. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.CustomKeys + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "CustomKeys"}, + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, + target: Optional[str] = None, + credentials: Optional["_models.CustomKeys"] = None, + **kwargs: Any + ) -> None: + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. + :paramtype metadata: JSON + :keyword target: + :paramtype target: str + :keyword credentials: Custom Keys credential object. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.CustomKeys + """ + super().__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) + self.auth_type: str = "CustomKeys" + self.credentials = credentials + + +class CustomMetricThreshold(_serialization.Model): + """CustomMetricThreshold. + + All required parameters must be populated in order to send to Azure. + + :ivar metric: [Required] The user-defined metric to calculate. Required. + :vartype metric: str + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + + _validation = { + "metric": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + } + + def __init__( + self, *, metric: str, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any + ) -> None: + """ + :keyword metric: [Required] The user-defined metric to calculate. Required. + :paramtype metric: str + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + super().__init__(**kwargs) + self.metric = metric + self.threshold = threshold + + +class JobInput(_serialization.Model): + """Command job definition. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + CustomModelJobInput, LiteralJobInput, MLFlowModelJobInput, MLTableJobInput, + TritonModelJobInput, UriFileJobInput, UriFolderJobInput + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + """ + + _validation = { + "job_input_type": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + } + + _subtype_map = { + "job_input_type": { + "custom_model": "CustomModelJobInput", + "literal": "LiteralJobInput", + "mlflow_model": "MLFlowModelJobInput", + "mltable": "MLTableJobInput", + "triton_model": "TritonModelJobInput", + "uri_file": "UriFileJobInput", + "uri_folder": "UriFolderJobInput", + } + } + + def __init__(self, *, description: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword description: Description for the input. + :paramtype description: str + """ + super().__init__(**kwargs) + self.description = description + self.job_input_type: Optional[str] = None + + +class CustomModelJobInput(AssetJobInput, JobInput): + """CustomModelJobInput. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", "Download", "Direct", "EvalMount", and "EvalDownload". @@ -6798,7 +8952,14 @@ class CustomModelJobOutput(AssetJobOutput, JobOutput): :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :ivar asset_name: Output Asset Name. + :vartype asset_name: str + :ivar asset_version: Output Asset Version. + :vartype asset_version: str + :ivar auto_delete_setting: Auto delete setting of output data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. 
:vartype uri: str @@ -6811,6 +8972,9 @@ class CustomModelJobOutput(AssetJobOutput, JobOutput): _attribute_map = { "description": {"key": "description", "type": "str"}, "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "mode": {"key": "mode", "type": "str"}, "uri": {"key": "uri", "type": "str"}, } @@ -6819,6 +8983,9 @@ def __init__( self, *, description: Optional[str] = None, + asset_name: Optional[str] = None, + asset_version: Optional[str] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, **kwargs: Any @@ -6826,18 +8993,198 @@ def __init__( """ :keyword description: Description for the output. :paramtype description: str - :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :keyword asset_name: Output Asset Name. + :paramtype asset_name: str + :keyword asset_version: Output Asset Version. + :paramtype asset_version: str + :keyword auto_delete_setting: Auto delete setting of output data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str """ - super().__init__(mode=mode, uri=uri, description=description, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) self.description = description self.job_output_type: str = "custom_model" + self.asset_name = asset_name + self.asset_version = asset_version + self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri +class MonitoringSignalBase(_serialization.Model): + """MonitoringSignalBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CustomMonitoringSignal, DataDriftMonitoringSignal, DataQualityMonitoringSignal, + FeatureAttributionDriftMonitoringSignal, GenerationSafetyQualityMonitoringSignal, + GenerationTokenStatisticsSignal, ModelPerformanceSignal, PredictionDriftMonitoringSignal + + All required parameters must be populated in order to send to Azure. + + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". 
+ :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType + """ + + _validation = { + "signal_type": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + } + + _subtype_map = { + "signal_type": { + "Custom": "CustomMonitoringSignal", + "DataDrift": "DataDriftMonitoringSignal", + "DataQuality": "DataQualityMonitoringSignal", + "FeatureAttributionDrift": "FeatureAttributionDriftMonitoringSignal", + "GenerationSafetyQuality": "GenerationSafetyQualityMonitoringSignal", + "GenerationTokenStatistics": "GenerationTokenStatisticsSignal", + "ModelPerformance": "ModelPerformanceSignal", + "PredictionDrift": "PredictionDriftMonitoringSignal", + } + } + + def __init__( + self, + *, + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + """ + super().__init__(**kwargs) + self.mode = mode + self.properties = properties + self.signal_type: Optional[str] = None + + +class CustomMonitoringSignal(MonitoringSignalBase): + """CustomMonitoringSignal. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". + :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType + :ivar component_id: [Required] ARM resource ID of the component resource used to calculate the + custom metrics. Required. + :vartype component_id: str + :ivar input_assets: Monitoring assets to take as input. Key is the component input port name, + value is the data asset. + :vartype input_assets: dict[str, + ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] + :ivar inputs: Extra component parameters to take as input. Key is the component literal input + port name, value is the parameter value. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :vartype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.CustomMetricThreshold] + :ivar workspace_connection: [Required] The workspace connection to be used by the signal. + Required.
+ :vartype workspace_connection: + ~azure.mgmt.machinelearningservices.models.MonitoringWorkspaceConnection + """ + + _validation = { + "signal_type": {"required": True}, + "component_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "metric_thresholds": {"required": True}, + "workspace_connection": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "component_id": {"key": "componentId", "type": "str"}, + "input_assets": {"key": "inputAssets", "type": "{MonitoringInputDataBase}"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[CustomMetricThreshold]"}, + "workspace_connection": {"key": "workspaceConnection", "type": "MonitoringWorkspaceConnection"}, + } + + def __init__( + self, + *, + component_id: str, + metric_thresholds: List["_models.CustomMetricThreshold"], + workspace_connection: "_models.MonitoringWorkspaceConnection", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, + properties: Optional[Dict[str, str]] = None, + input_assets: Optional[Dict[str, "_models.MonitoringInputDataBase"]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword component_id: [Required] ARM resource ID of the component resource used to calculate + the custom metrics. Required. + :paramtype component_id: str + :keyword input_assets: Monitoring assets to take as input. Key is the component input port + name, value is the data asset. + :paramtype input_assets: dict[str, + ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] + :keyword inputs: Extra component parameters to take as input. Key is the component literal + input port name, value is the parameter value. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :paramtype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.CustomMetricThreshold] + :keyword workspace_connection: [Required] The workspace connection to be used by the signal. + Required. + :paramtype workspace_connection: + ~azure.mgmt.machinelearningservices.models.MonitoringWorkspaceConnection + """ + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "Custom" + self.component_id = component_id + self.input_assets = input_assets + self.inputs = inputs + self.metric_thresholds = metric_thresholds + self.workspace_connection = workspace_connection + + + class CustomNCrossValidations(NCrossValidations): """N-Cross validations are specified by user. @@ -7036,6 +9383,105 @@ def __init__(self, *, value: int, **kwargs: Any) -> None: self.value = value +class DataImportSource(_serialization.Model): + """DataImportSource. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DatabaseSource, FileSystemSource + + All required parameters must be populated in order to send to Azure.
+ + :ivar connection: Workspace connection for data import source storage. + :vartype connection: str + :ivar source_type: [Required] Specifies the type of data. Required. Known values are: + "database" and "file_system". + :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.DataImportSourceType + """ + + _validation = { + "source_type": {"required": True}, + } + + _attribute_map = { + "connection": {"key": "connection", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + } + + _subtype_map = {"source_type": {"database": "DatabaseSource", "file_system": "FileSystemSource"}} + + def __init__(self, *, connection: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword connection: Workspace connection for data import source storage. + :paramtype connection: str + """ + super().__init__(**kwargs) + self.connection = connection + self.source_type: Optional[str] = None + + +class DatabaseSource(DataImportSource): + """DatabaseSource. + + All required parameters must be populated in order to send to Azure. + + :ivar connection: Workspace connection for data import source storage. + :vartype connection: str + :ivar source_type: [Required] Specifies the type of data. Required. Known values are: + "database" and "file_system". + :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.DataImportSourceType + :ivar query: SQL Query statement for data import Database source. + :vartype query: str + :ivar stored_procedure: SQL StoredProcedure on data import Database source. + :vartype stored_procedure: str + :ivar stored_procedure_params: SQL StoredProcedure parameters. + :vartype stored_procedure_params: list[dict[str, str]] + :ivar table_name: Name of the table on data import Database source. + :vartype table_name: str + """ + + _validation = { + "source_type": {"required": True}, + } + + _attribute_map = { + "connection": {"key": "connection", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + "query": {"key": "query", "type": "str"}, + "stored_procedure": {"key": "storedProcedure", "type": "str"}, + "stored_procedure_params": {"key": "storedProcedureParams", "type": "[{str}]"}, + "table_name": {"key": "tableName", "type": "str"}, + } + + def __init__( + self, + *, + connection: Optional[str] = None, + query: Optional[str] = None, + stored_procedure: Optional[str] = None, + stored_procedure_params: Optional[List[Dict[str, str]]] = None, + table_name: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword connection: Workspace connection for data import source storage. + :paramtype connection: str + :keyword query: SQL Query statement for data import Database source. + :paramtype query: str + :keyword stored_procedure: SQL StoredProcedure on data import Database source. + :paramtype stored_procedure: str + :keyword stored_procedure_params: SQL StoredProcedure parameters. + :paramtype stored_procedure_params: list[dict[str, str]] + :keyword table_name: Name of the table on data import Database source. + :paramtype table_name: str + """ + super().__init__(connection=connection, **kwargs) + self.source_type: str = "database" + self.query = query + self.stored_procedure = stored_procedure + self.stored_procedure_params = stored_procedure_params + self.table_name = table_name + + class DatabricksSchema(_serialization.Model): """DatabricksSchema. @@ -7241,30 +9687,95 @@ def __init__( self.workspace_url = workspace_url -class DataContainer(Resource): - """Azure Resource Manager resource envelope. 
- - Variables are only populated by the server, and will be ignored when sending a request. +class DataCollector(_serialization.Model): + """DataCollector. All required parameters must be populated in order to send to Azure. - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: [Required] Additional attributes of the entity. Required. - :vartype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties + :ivar collections: [Required] The collection configuration. Each collection has its own + configuration to collect model data and the name of the collection can be an arbitrary string. + Model data collector can be used for either payload logging or custom logging or both of them. + Collection request and response are reserved for payload logging; others are for custom + logging. Required. + :vartype collections: dict[str, ~azure.mgmt.machinelearningservices.models.Collection] + :ivar request_logging: The request logging configuration for mdc; it includes advanced logging + settings for all collections. It's optional. + :vartype request_logging: ~azure.mgmt.machinelearningservices.models.RequestLogging + :ivar rolling_rate: When model data is collected to blob storage, we need to roll the data to + different path to avoid logging all of them in a single blob file. + If the rolling rate is hour, all data will be collected in the blob path /yyyy/MM/dd/HH/. + If it's day, all data will be collected in blob path /yyyy/MM/dd/. + The other benefit of rolling path is that model monitoring ui is able to select a time range + of data very quickly. Known values are: "Year", "Month", "Day", "Hour", and "Minute". + :vartype rolling_rate: str or ~azure.mgmt.machinelearningservices.models.RollingRateType """ _validation = { - "id": {"readonly": True}, + "collections": {"required": True}, } + + _attribute_map = { + "collections": {"key": "collections", "type": "{Collection}"}, + "request_logging": {"key": "requestLogging", "type": "RequestLogging"}, + "rolling_rate": {"key": "rollingRate", "type": "str"}, + } + + def __init__( + self, + *, + collections: Dict[str, "_models.Collection"], + request_logging: Optional["_models.RequestLogging"] = None, + rolling_rate: Optional[Union[str, "_models.RollingRateType"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword collections: [Required] The collection configuration. Each collection has its own + configuration to collect model data and the name of the collection can be an arbitrary string. + Model data collector can be used for either payload logging or custom logging or both of them. + Collection request and response are reserved for payload logging; others are for custom + logging. Required. + :paramtype collections: dict[str, ~azure.mgmt.machinelearningservices.models.Collection] + :keyword request_logging: The request logging configuration for mdc; it includes advanced + logging settings for all collections. It's optional.
+ :paramtype request_logging: ~azure.mgmt.machinelearningservices.models.RequestLogging + :keyword rolling_rate: When model data is collected to blob storage, we need to roll the data + to different path to avoid logging all of them in a single blob file. + If the rolling rate is hour, all data will be collected in the blob path /yyyy/MM/dd/HH/. + If it's day, all data will be collected in blob path /yyyy/MM/dd/. + The other benefit of rolling path is that model monitoring ui is able to select a time range + of data very quickly. Known values are: "Year", "Month", "Day", "Hour", and "Minute". + :paramtype rolling_rate: str or ~azure.mgmt.machinelearningservices.models.RollingRateType + """ + super().__init__(**kwargs) + self.collections = collections + self.request_logging = request_logging + self.rolling_rate = rolling_rate + + +class DataContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties + """ + + _validation = { + "id": {"readonly": True}, "name": {"readonly": True}, "type": {"readonly": True}, "system_data": {"readonly": True}, @@ -7385,6 +9896,103 @@ def __init__( self.value = value +class DataDriftMonitoringSignal(MonitoringSignalBase): + """DataDriftMonitoringSignal. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". + :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType + :ivar data_segment: The data segment used for scoping on a subset of the data population. + :vartype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment + :ivar feature_data_type_override: A dictionary that maps feature names to their respective data + types. + :vartype feature_data_type_override: dict[str, str or + ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] + :ivar features: The feature filter which identifies which feature to calculate drift over. 
+ :vartype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase + :ivar metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :vartype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.DataDriftMetricThresholdBase] + :ivar production_data: [Required] The data which drift will be calculated for. Required. + :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + :ivar reference_data: [Required] The data to calculate drift against. Required. + :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + """ + + _validation = { + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "data_segment": {"key": "dataSegment", "type": "MonitoringDataSegment"}, + "feature_data_type_override": {"key": "featureDataTypeOverride", "type": "{str}"}, + "features": {"key": "features", "type": "MonitoringFeatureFilterBase"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[DataDriftMetricThresholdBase]"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, + } + + def __init__( + self, + *, + metric_thresholds: List["_models.DataDriftMetricThresholdBase"], + production_data: "_models.MonitoringInputDataBase", + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, + properties: Optional[Dict[str, str]] = None, + data_segment: Optional["_models.MonitoringDataSegment"] = None, + feature_data_type_override: Optional[Dict[str, Union[str, "_models.MonitoringFeatureDataType"]]] = None, + features: Optional["_models.MonitoringFeatureFilterBase"] = None, + **kwargs: Any + ) -> None: + """ + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword data_segment: The data segment used for scoping on a subset of the data population. + :paramtype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment + :keyword feature_data_type_override: A dictionary that maps feature names to their respective + data types. + :paramtype feature_data_type_override: dict[str, str or + ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] + :keyword features: The feature filter which identifies which feature to calculate drift over. + :paramtype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase + :keyword metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :paramtype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.DataDriftMetricThresholdBase] + :keyword production_data: [Required] The data which drift will be calculated for. Required. 
+ :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + :keyword reference_data: [Required] The data to calculate drift against. Required. + :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + """ + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "DataDrift" + self.data_segment = data_segment + self.feature_data_type_override = feature_data_type_override + self.features = features + self.metric_thresholds = metric_thresholds + self.production_data = production_data + self.reference_data = reference_data + + class DataFactory(Compute): """A DataFactory compute. @@ -7473,6 +10081,235 @@ def __init__( self.compute_type: str = "DataFactory" +class DataVersionBaseProperties(AssetBase): + """Data version base definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MLTableData, UriFileDataVersion, UriFolderDataVersion + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :vartype is_archived: bool + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. + :vartype data_uri: str + :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :ivar stage: Stage in the data lifecycle assigned to this data asset. 
+ :vartype stage: str + """ + + _validation = { + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, + } + + _subtype_map = { + "data_type": {"mltable": "MLTableData", "uri_file": "UriFileDataVersion", "uri_folder": "UriFolderDataVersion"} + } + + def __init__( + self, + *, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :paramtype is_archived: bool + :keyword data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. + :paramtype data_uri: str + :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword stage: Stage in the data lifecycle assigned to this data asset. + :paramtype stage: str + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) + self.data_type: Optional[str] = None + self.data_uri = data_uri + self.intellectual_property = intellectual_property + self.stage = stage + + +class DataImport(DataVersionBaseProperties): # pylint: disable=too-many-instance-attributes + """DataImport. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :vartype tags: dict[str, str] + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :vartype is_archived: bool + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. + :vartype data_uri: str + :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :ivar stage: Stage in the data lifecycle assigned to this data asset. + :vartype stage: str + :ivar asset_name: Name of the asset for data import job to create. + :vartype asset_name: str + :ivar source: Source data of the asset to import from. + :vartype source: ~azure.mgmt.machinelearningservices.models.DataImportSource + """ + + _validation = { + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "source": {"key": "source", "type": "DataImportSource"}, + } + + def __init__( + self, + *, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + stage: Optional[str] = None, + asset_name: Optional[str] = None, + source: Optional["_models.DataImportSource"] = None, + **kwargs: Any + ) -> None: + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. 
+ :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :paramtype is_archived: bool + :keyword data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. + :paramtype data_uri: str + :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword stage: Stage in the data lifecycle assigned to this data asset. + :paramtype stage: str + :keyword asset_name: Name of the asset for data import job to create. + :paramtype asset_name: str + :keyword source: Source data of the asset to import from. + :paramtype source: ~azure.mgmt.machinelearningservices.models.DataImportSource + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + data_uri=data_uri, + intellectual_property=intellectual_property, + stage=stage, + **kwargs + ) + self.data_type: str = "uri_folder" + self.asset_name = asset_name + self.source = source + + class DataLakeAnalyticsSchema(_serialization.Model): """DataLakeAnalyticsSchema. @@ -7661,47 +10498,186 @@ def __init__(self, *, datastore_id: Optional[str] = None, path: Optional[str] = self.path = path -class Datastore(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. +class DataQualityMonitoringSignal(MonitoringSignalBase): + """DataQualityMonitoringSignal. All required parameters must be populated in order to send to Azure. - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: [Required] Additional attributes of the entity. Required. - :vartype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". + :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType + :ivar feature_data_type_override: A dictionary that maps feature names to their respective data + types. 
+ :vartype feature_data_type_override: dict[str, str or + ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] + :ivar features: The features to calculate drift over. + :vartype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase + :ivar metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :vartype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.DataQualityMetricThresholdBase] + :ivar production_data: [Required] The data produced by the production service which drift will + be calculated for. Required. + :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + :ivar reference_data: [Required] The data to calculate drift against. Required. + :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - "properties": {"required": True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - "properties": {"key": "properties", "type": "DatastoreProperties"}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "feature_data_type_override": {"key": "featureDataTypeOverride", "type": "{str}"}, + "features": {"key": "features", "type": "MonitoringFeatureFilterBase"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[DataQualityMetricThresholdBase]"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } - def __init__(self, *, properties: "_models.DatastoreProperties", **kwargs: Any) -> None: - """ - :keyword properties: [Required] Additional attributes of the entity. Required. + def __init__( + self, + *, + metric_thresholds: List["_models.DataQualityMetricThresholdBase"], + production_data: "_models.MonitoringInputDataBase", + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, + properties: Optional[Dict[str, str]] = None, + feature_data_type_override: Optional[Dict[str, Union[str, "_models.MonitoringFeatureDataType"]]] = None, + features: Optional["_models.MonitoringFeatureFilterBase"] = None, + **kwargs: Any + ) -> None: + """ + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword feature_data_type_override: A dictionary that maps feature names to their respective + data types. + :paramtype feature_data_type_override: dict[str, str or + ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] + :keyword features: The features to calculate drift over. 
+ :paramtype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase + :keyword metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :paramtype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.DataQualityMetricThresholdBase] + :keyword production_data: [Required] The data produced by the production service which drift + will be calculated for. Required. + :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + :keyword reference_data: [Required] The data to calculate drift against. Required. + :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + """ + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "DataQuality" + self.feature_data_type_override = feature_data_type_override + self.features = features + self.metric_thresholds = metric_thresholds + self.production_data = production_data + self.reference_data = reference_data + + +class DatasetExportSummary(ExportSummary): + """DatasetExportSummary. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + :ivar labeled_asset_name: The unique name of the labeled data asset. + :vartype labeled_asset_name: str + """ + + _validation = { + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "labeled_asset_name": {"readonly": True}, + } + + _attribute_map = { + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "labeled_asset_name": {"key": "labeledAssetName", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.format: str = "Dataset" + self.labeled_asset_name = None + + +class Datastore(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DatastoreProperties"}, + } + + def __init__(self, *, properties: "_models.DatastoreProperties", **kwargs: Any) -> None: + """ + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties """ super().__init__(**kwargs) @@ -7785,89 +10761,6 @@ def __init__(self, *, properties: "_models.DataVersionBaseProperties", **kwargs: self.properties = properties -class DataVersionBaseProperties(AssetBase): - """Data version base definition. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - MLTableData, UriFileDataVersion, UriFolderDataVersion - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_anonymous: If the name version are system generated (anonymous registration). - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", - "uri_folder", and "mltable". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. Required. - :vartype data_uri: str - """ - - _validation = { - "data_type": {"required": True}, - "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, - } - - _attribute_map = { - "description": {"key": "description", "type": "str"}, - "properties": {"key": "properties", "type": "{str}"}, - "tags": {"key": "tags", "type": "{str}"}, - "is_anonymous": {"key": "isAnonymous", "type": "bool"}, - "is_archived": {"key": "isArchived", "type": "bool"}, - "data_type": {"key": "dataType", "type": "str"}, - "data_uri": {"key": "dataUri", "type": "str"}, - } - - _subtype_map = { - "data_type": {"mltable": "MLTableData", "uri_file": "UriFileDataVersion", "uri_folder": "UriFolderDataVersion"} - } - - def __init__( - self, - *, - data_uri: str, - description: Optional[str] = None, - properties: Optional[Dict[str, str]] = None, - tags: Optional[Dict[str, str]] = None, - is_anonymous: bool = False, - is_archived: bool = False, - **kwargs: Any - ) -> None: - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. 
- :paramtype properties: dict[str, str] - :keyword tags: Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_anonymous: If the name version are system generated (anonymous registration). - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword data_uri: [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. Required. - :paramtype data_uri: str - """ - super().__init__( - description=description, - properties=properties, - tags=tags, - is_anonymous=is_anonymous, - is_archived=is_archived, - **kwargs - ) - self.data_type: Optional[str] = None - self.data_uri = data_uri - - class DataVersionBaseResourceArmPaginatedResult(_serialization.Model): """A paginated list of DataVersionBase entities. @@ -7977,7 +10870,7 @@ class DeploymentLogsRequest(_serialization.Model): """DeploymentLogsRequest. :ivar container_type: The type of container to retrieve logs from. Known values are: - "StorageInitializer" and "InferenceServer". + "StorageInitializer", "InferenceServer", and "ModelDataCollector". :vartype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType :ivar tail: The maximum number of lines to tail. :vartype tail: int @@ -7997,7 +10890,7 @@ def __init__( ) -> None: """ :keyword container_type: The type of container to retrieve logs from. Known values are: - "StorageInitializer" and "InferenceServer". + "StorageInitializer", "InferenceServer", and "ModelDataCollector". :paramtype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType :keyword tail: The maximum number of lines to tail. :paramtype tail: int @@ -8014,6 +10907,12 @@ class ResourceConfiguration(_serialization.Model): :vartype instance_count: int :ivar instance_type: Optional type of VM used as supported by the compute target. :vartype instance_type: str + :ivar locations: Locations where the job can run. + :vartype locations: list[str] + :ivar max_instance_count: Optional max allowed number of instances or nodes to be used by the + compute target. + For use with elastic training, currently supported by PyTorch distribution type only. + :vartype max_instance_count: int :ivar properties: Additional properties bag. :vartype properties: dict[str, JSON] """ @@ -8021,6 +10920,8 @@ class ResourceConfiguration(_serialization.Model): _attribute_map = { "instance_count": {"key": "instanceCount", "type": "int"}, "instance_type": {"key": "instanceType", "type": "str"}, + "locations": {"key": "locations", "type": "[str]"}, + "max_instance_count": {"key": "maxInstanceCount", "type": "int"}, "properties": {"key": "properties", "type": "{object}"}, } @@ -8029,6 +10930,8 @@ def __init__( *, instance_count: int = 1, instance_type: Optional[str] = None, + locations: Optional[List[str]] = None, + max_instance_count: Optional[int] = None, properties: Optional[Dict[str, JSON]] = None, **kwargs: Any ) -> None: @@ -8037,12 +10940,20 @@ def __init__( :paramtype instance_count: int :keyword instance_type: Optional type of VM used as supported by the compute target. :paramtype instance_type: str + :keyword locations: Locations where the job can run. + :paramtype locations: list[str] + :keyword max_instance_count: Optional max allowed number of instances or nodes to be used by + the compute target. + For use with elastic training, currently supported by PyTorch distribution type only. 
+ :paramtype max_instance_count: int :keyword properties: Additional properties bag. :paramtype properties: dict[str, JSON] """ super().__init__(**kwargs) self.instance_count = instance_count self.instance_type = instance_type + self.locations = locations + self.max_instance_count = max_instance_count self.properties = properties @@ -8053,6 +10964,12 @@ class DeploymentResourceConfiguration(ResourceConfiguration): :vartype instance_count: int :ivar instance_type: Optional type of VM used as supported by the compute target. :vartype instance_type: str + :ivar locations: Locations where the job can run. + :vartype locations: list[str] + :ivar max_instance_count: Optional max allowed number of instances or nodes to be used by the + compute target. + For use with elastic training, currently supported by PyTorch distribution type only. + :vartype max_instance_count: int :ivar properties: Additional properties bag. :vartype properties: dict[str, JSON] """ @@ -8060,6 +10977,8 @@ class DeploymentResourceConfiguration(ResourceConfiguration): _attribute_map = { "instance_count": {"key": "instanceCount", "type": "int"}, "instance_type": {"key": "instanceType", "type": "str"}, + "locations": {"key": "locations", "type": "[str]"}, + "max_instance_count": {"key": "maxInstanceCount", "type": "int"}, "properties": {"key": "properties", "type": "{object}"}, } @@ -8068,6 +10987,8 @@ def __init__( *, instance_count: int = 1, instance_type: Optional[str] = None, + locations: Optional[List[str]] = None, + max_instance_count: Optional[int] = None, properties: Optional[Dict[str, JSON]] = None, **kwargs: Any ) -> None: @@ -8076,91 +10997,104 @@ def __init__( :paramtype instance_count: int :keyword instance_type: Optional type of VM used as supported by the compute target. :paramtype instance_type: str + :keyword locations: Locations where the job can run. + :paramtype locations: list[str] + :keyword max_instance_count: Optional max allowed number of instances or nodes to be used by + the compute target. + For use with elastic training, currently supported by PyTorch distribution type only. + :paramtype max_instance_count: int :keyword properties: Additional properties bag. :paramtype properties: dict[str, JSON] """ - super().__init__(instance_count=instance_count, instance_type=instance_type, properties=properties, **kwargs) + super().__init__( + instance_count=instance_count, + instance_type=instance_type, + locations=locations, + max_instance_count=max_instance_count, + properties=properties, + **kwargs + ) class DiagnoseRequestProperties(_serialization.Model): """DiagnoseRequestProperties. - :ivar udr: Setting for diagnosing user defined routing. - :vartype udr: dict[str, JSON] - :ivar nsg: Setting for diagnosing network security group. - :vartype nsg: dict[str, JSON] - :ivar resource_lock: Setting for diagnosing resource lock. - :vartype resource_lock: dict[str, JSON] + :ivar application_insights: Setting for diagnosing dependent application insights. + :vartype application_insights: dict[str, any] + :ivar container_registry: Setting for diagnosing dependent container registry. + :vartype container_registry: dict[str, any] :ivar dns_resolution: Setting for diagnosing dns resolution. - :vartype dns_resolution: dict[str, JSON] - :ivar storage_account: Setting for diagnosing dependent storage account. - :vartype storage_account: dict[str, JSON] + :vartype dns_resolution: dict[str, any] :ivar key_vault: Setting for diagnosing dependent key vault. 
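The new locations and max_instance_count fields on ResourceConfiguration (mirrored on DeploymentResourceConfiguration below) describe elastic scaling for a job. A minimal sketch using only fields shown in this hunk:

from azure.mgmt.machinelearningservices import models as _models

# Start with 2 nodes, allow elastic scale-out to 8 (per the docstring, currently only
# honored for the PyTorch distribution type), and restrict placement to two regions.
resources = _models.ResourceConfiguration(
    instance_count=2,
    max_instance_count=8,
    instance_type="STANDARD_NC6s_v3",
    locations=["eastus", "westus2"],
)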
- :vartype key_vault: dict[str, JSON] - :ivar container_registry: Setting for diagnosing dependent container registry. - :vartype container_registry: dict[str, JSON] - :ivar application_insights: Setting for diagnosing dependent application insights. - :vartype application_insights: dict[str, JSON] + :vartype key_vault: dict[str, any] + :ivar nsg: Setting for diagnosing network security group. + :vartype nsg: dict[str, any] :ivar others: Setting for diagnosing unclassified category of problems. - :vartype others: dict[str, JSON] + :vartype others: dict[str, any] + :ivar resource_lock: Setting for diagnosing resource lock. + :vartype resource_lock: dict[str, any] + :ivar storage_account: Setting for diagnosing dependent storage account. + :vartype storage_account: dict[str, any] + :ivar udr: Setting for diagnosing user defined routing. + :vartype udr: dict[str, any] """ _attribute_map = { - "udr": {"key": "udr", "type": "{object}"}, - "nsg": {"key": "nsg", "type": "{object}"}, - "resource_lock": {"key": "resourceLock", "type": "{object}"}, + "application_insights": {"key": "applicationInsights", "type": "{object}"}, + "container_registry": {"key": "containerRegistry", "type": "{object}"}, "dns_resolution": {"key": "dnsResolution", "type": "{object}"}, - "storage_account": {"key": "storageAccount", "type": "{object}"}, "key_vault": {"key": "keyVault", "type": "{object}"}, - "container_registry": {"key": "containerRegistry", "type": "{object}"}, - "application_insights": {"key": "applicationInsights", "type": "{object}"}, + "nsg": {"key": "nsg", "type": "{object}"}, "others": {"key": "others", "type": "{object}"}, + "resource_lock": {"key": "resourceLock", "type": "{object}"}, + "storage_account": {"key": "storageAccount", "type": "{object}"}, + "udr": {"key": "udr", "type": "{object}"}, } def __init__( self, *, - udr: Optional[Dict[str, JSON]] = None, - nsg: Optional[Dict[str, JSON]] = None, - resource_lock: Optional[Dict[str, JSON]] = None, - dns_resolution: Optional[Dict[str, JSON]] = None, - storage_account: Optional[Dict[str, JSON]] = None, - key_vault: Optional[Dict[str, JSON]] = None, - container_registry: Optional[Dict[str, JSON]] = None, - application_insights: Optional[Dict[str, JSON]] = None, - others: Optional[Dict[str, JSON]] = None, + application_insights: Optional[Dict[str, Any]] = None, + container_registry: Optional[Dict[str, Any]] = None, + dns_resolution: Optional[Dict[str, Any]] = None, + key_vault: Optional[Dict[str, Any]] = None, + nsg: Optional[Dict[str, Any]] = None, + others: Optional[Dict[str, Any]] = None, + resource_lock: Optional[Dict[str, Any]] = None, + storage_account: Optional[Dict[str, Any]] = None, + udr: Optional[Dict[str, Any]] = None, **kwargs: Any ) -> None: """ - :keyword udr: Setting for diagnosing user defined routing. - :paramtype udr: dict[str, JSON] - :keyword nsg: Setting for diagnosing network security group. - :paramtype nsg: dict[str, JSON] - :keyword resource_lock: Setting for diagnosing resource lock. - :paramtype resource_lock: dict[str, JSON] + :keyword application_insights: Setting for diagnosing dependent application insights. + :paramtype application_insights: dict[str, any] + :keyword container_registry: Setting for diagnosing dependent container registry. + :paramtype container_registry: dict[str, any] :keyword dns_resolution: Setting for diagnosing dns resolution. - :paramtype dns_resolution: dict[str, JSON] - :keyword storage_account: Setting for diagnosing dependent storage account. 
- :paramtype storage_account: dict[str, JSON] + :paramtype dns_resolution: dict[str, any] :keyword key_vault: Setting for diagnosing dependent key vault. - :paramtype key_vault: dict[str, JSON] - :keyword container_registry: Setting for diagnosing dependent container registry. - :paramtype container_registry: dict[str, JSON] - :keyword application_insights: Setting for diagnosing dependent application insights. - :paramtype application_insights: dict[str, JSON] + :paramtype key_vault: dict[str, any] + :keyword nsg: Setting for diagnosing network security group. + :paramtype nsg: dict[str, any] :keyword others: Setting for diagnosing unclassified category of problems. - :paramtype others: dict[str, JSON] + :paramtype others: dict[str, any] + :keyword resource_lock: Setting for diagnosing resource lock. + :paramtype resource_lock: dict[str, any] + :keyword storage_account: Setting for diagnosing dependent storage account. + :paramtype storage_account: dict[str, any] + :keyword udr: Setting for diagnosing user defined routing. + :paramtype udr: dict[str, any] """ super().__init__(**kwargs) - self.udr = udr - self.nsg = nsg - self.resource_lock = resource_lock + self.application_insights = application_insights + self.container_registry = container_registry self.dns_resolution = dns_resolution - self.storage_account = storage_account self.key_vault = key_vault - self.container_registry = container_registry - self.application_insights = application_insights + self.nsg = nsg self.others = others + self.resource_lock = resource_lock + self.storage_account = storage_account + self.udr = udr class DiagnoseResponseResult(_serialization.Model): @@ -8314,7 +11248,7 @@ def __init__(self, **kwargs: Any) -> None: class DiagnoseWorkspaceParameters(_serialization.Model): """Parameters to diagnose a workspace. - :ivar value: Value of Parameters. + :ivar value: :vartype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties """ @@ -8324,7 +11258,7 @@ class DiagnoseWorkspaceParameters(_serialization.Model): def __init__(self, *, value: Optional["_models.DiagnoseRequestProperties"] = None, **kwargs: Any) -> None: """ - :keyword value: Value of Parameters. + :keyword value: :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties """ super().__init__(**kwargs) @@ -8335,12 +11269,12 @@ class DistributionConfiguration(_serialization.Model): """Base definition for job distribution configuration. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - Mpi, PyTorch, TensorFlow + Mpi, PyTorch, Ray, TensorFlow All required parameters must be populated in order to send to Azure. :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. - Known values are: "PyTorch", "TensorFlow", and "Mpi". + Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray". :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType """ @@ -8352,7 +11286,7 @@ class DistributionConfiguration(_serialization.Model): "distribution_type": {"key": "distributionType", "type": "str"}, } - _subtype_map = {"distribution_type": {"Mpi": "Mpi", "PyTorch": "PyTorch", "TensorFlow": "TensorFlow"}} + _subtype_map = {"distribution_type": {"Mpi": "Mpi", "PyTorch": "PyTorch", "Ray": "Ray", "TensorFlow": "TensorFlow"}} def __init__(self, **kwargs: Any) -> None: """ """ @@ -8361,7 +11295,7 @@ def __init__(self, **kwargs: Any) -> None: class Docker(_serialization.Model): - """Docker container configuration. 
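DiagnoseRequestProperties now types every category as an open dict and orders the fields alphabetically, and DiagnoseWorkspaceParameters simply wraps it. A usage sketch; the workspaces.begin_diagnose operation is assumed and is not part of this hunk:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices import models as _models

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")

# An empty dict per category asks the service to run that check with default settings.
params = _models.DiagnoseWorkspaceParameters(
    value=_models.DiagnoseRequestProperties(
        dns_resolution={},
        key_vault={},
        storage_account={},
    )
)
poller = client.workspaces.begin_diagnose("<resource-group>", "<workspace>", parameters=params)
diagnose_result = poller.result()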
+ """Docker. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. @@ -8394,49 +11328,66 @@ def __init__( self.privileged = privileged -class EncryptionKeyVaultProperties(_serialization.Model): - """EncryptionKeyVaultProperties. +class EmailMonitoringAlertNotificationSettings(MonitoringAlertNotificationSettingsBase): + """EmailMonitoringAlertNotificationSettings. All required parameters must be populated in order to send to Azure. - :ivar key_vault_arm_id: The ArmId of the keyVault where the customer owned encryption key is - present. Required. - :vartype key_vault_arm_id: str - :ivar key_identifier: Key vault uri to access the encryption key. Required. - :vartype key_identifier: str - :ivar identity_client_id: For future use - The client id of the identity which will be used to - access key vault. - :vartype identity_client_id: str + :ivar alert_notification_type: [Required] Specifies the type of signal to monitor. Required. + Known values are: "AzureMonitor" and "Email". + :vartype alert_notification_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationType + :ivar email_notification_setting: Configuration for notification. + :vartype email_notification_setting: + ~azure.mgmt.machinelearningservices.models.NotificationSetting """ _validation = { - "key_vault_arm_id": {"required": True}, - "key_identifier": {"required": True}, + "alert_notification_type": {"required": True}, } _attribute_map = { - "key_vault_arm_id": {"key": "keyVaultArmId", "type": "str"}, - "key_identifier": {"key": "keyIdentifier", "type": "str"}, - "identity_client_id": {"key": "identityClientId", "type": "str"}, + "alert_notification_type": {"key": "alertNotificationType", "type": "str"}, + "email_notification_setting": {"key": "emailNotificationSetting", "type": "NotificationSetting"}, } def __init__( - self, *, key_vault_arm_id: str, key_identifier: str, identity_client_id: Optional[str] = None, **kwargs: Any + self, *, email_notification_setting: Optional["_models.NotificationSetting"] = None, **kwargs: Any ) -> None: """ - :keyword key_vault_arm_id: The ArmId of the keyVault where the customer owned encryption key is - present. Required. - :paramtype key_vault_arm_id: str - :keyword key_identifier: Key vault uri to access the encryption key. Required. + :keyword email_notification_setting: Configuration for notification. + :paramtype email_notification_setting: + ~azure.mgmt.machinelearningservices.models.NotificationSetting + """ + super().__init__(**kwargs) + self.alert_notification_type: str = "Email" + self.email_notification_setting = email_notification_setting + + +class EncryptionKeyVaultUpdateProperties(_serialization.Model): + """EncryptionKeyVaultUpdateProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar key_identifier: Required. + :vartype key_identifier: str + """ + + _validation = { + "key_identifier": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "key_identifier": {"key": "keyIdentifier", "type": "str"}, + } + + def __init__(self, *, key_identifier: str, **kwargs: Any) -> None: + """ + :keyword key_identifier: Required. :paramtype key_identifier: str - :keyword identity_client_id: For future use - The client id of the identity which will be used - to access key vault. 
- :paramtype identity_client_id: str """ super().__init__(**kwargs) - self.key_vault_arm_id = key_vault_arm_id self.key_identifier = key_identifier - self.identity_client_id = identity_client_id class EncryptionProperty(_serialization.Model): @@ -8444,66 +11395,124 @@ class EncryptionProperty(_serialization.Model): All required parameters must be populated in order to send to Azure. + :ivar cosmos_db_resource_id: The byok cosmosdb account that customer brings to store customer's + data + with encryption. + :vartype cosmos_db_resource_id: str + :ivar identity: Identity to be used with the keyVault. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk + :ivar key_vault_properties: KeyVault details to do the encryption. Required. + :vartype key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties + :ivar search_account_resource_id: The byok search account that customer brings to store + customer's data + with encryption. + :vartype search_account_resource_id: str :ivar status: Indicates whether or not the encryption is enabled for the workspace. Required. Known values are: "Enabled" and "Disabled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus - :ivar identity: The identity that will be used to access the key vault for encryption at rest. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk - :ivar key_vault_properties: Customer Key vault properties. Required. - :vartype key_vault_properties: - ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultProperties + :ivar storage_account_resource_id: The byok storage account that customer brings to store + customer's data + with encryption. + :vartype storage_account_resource_id: str """ _validation = { - "status": {"required": True}, "key_vault_properties": {"required": True}, + "status": {"required": True}, } _attribute_map = { - "status": {"key": "status", "type": "str"}, + "cosmos_db_resource_id": {"key": "cosmosDbResourceId", "type": "str"}, "identity": {"key": "identity", "type": "IdentityForCmk"}, - "key_vault_properties": {"key": "keyVaultProperties", "type": "EncryptionKeyVaultProperties"}, + "key_vault_properties": {"key": "keyVaultProperties", "type": "KeyVaultProperties"}, + "search_account_resource_id": {"key": "searchAccountResourceId", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "storage_account_resource_id": {"key": "storageAccountResourceId", "type": "str"}, } def __init__( self, *, + key_vault_properties: "_models.KeyVaultProperties", status: Union[str, "_models.EncryptionStatus"], - key_vault_properties: "_models.EncryptionKeyVaultProperties", + cosmos_db_resource_id: Optional[str] = None, identity: Optional["_models.IdentityForCmk"] = None, + search_account_resource_id: Optional[str] = None, + storage_account_resource_id: Optional[str] = None, **kwargs: Any ) -> None: """ + :keyword cosmos_db_resource_id: The byok cosmosdb account that customer brings to store + customer's data + with encryption. + :paramtype cosmos_db_resource_id: str + :keyword identity: Identity to be used with the keyVault. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk + :keyword key_vault_properties: KeyVault details to do the encryption. Required. + :paramtype key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties + :keyword search_account_resource_id: The byok search account that customer brings to store + customer's data + with encryption. 
+ :paramtype search_account_resource_id: str :keyword status: Indicates whether or not the encryption is enabled for the workspace. Required. Known values are: "Enabled" and "Disabled". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus - :keyword identity: The identity that will be used to access the key vault for encryption at - rest. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk - :keyword key_vault_properties: Customer Key vault properties. Required. - :paramtype key_vault_properties: - ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultProperties + :keyword storage_account_resource_id: The byok storage account that customer brings to store + customer's data + with encryption. + :paramtype storage_account_resource_id: str """ super().__init__(**kwargs) - self.status = status + self.cosmos_db_resource_id = cosmos_db_resource_id self.identity = identity self.key_vault_properties = key_vault_properties + self.search_account_resource_id = search_account_resource_id + self.status = status + self.storage_account_resource_id = storage_account_resource_id -class Endpoint(_serialization.Model): - """Describes the endpoint configuration for the container. +class EncryptionUpdateProperties(_serialization.Model): + """EncryptionUpdateProperties. - :ivar protocol: Protocol over which communication will happen over this endpoint. Known values - are: "tcp", "udp", and "http". - :vartype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol - :ivar name: Name of the Endpoint. - :vartype name: str - :ivar target: Application port inside the container. - :vartype target: int - :ivar published: Port over which the application is exposed from container. - :vartype published: int - :ivar host_ip: Host IP over which the application is exposed from the container. - :vartype host_ip: str + All required parameters must be populated in order to send to Azure. + + :ivar key_vault_properties: Required. + :vartype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties + """ + + _validation = { + "key_vault_properties": {"required": True}, + } + + _attribute_map = { + "key_vault_properties": {"key": "keyVaultProperties", "type": "EncryptionKeyVaultUpdateProperties"}, + } + + def __init__(self, *, key_vault_properties: "_models.EncryptionKeyVaultUpdateProperties", **kwargs: Any) -> None: + """ + :keyword key_vault_properties: Required. + :paramtype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties + """ + super().__init__(**kwargs) + self.key_vault_properties = key_vault_properties + + +class Endpoint(_serialization.Model): + """Endpoint. + + :ivar protocol: Protocol over which communication will happen over this endpoint. Known values + are: "tcp", "udp", and "http". + :vartype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol + :ivar name: Name of the Endpoint. + :vartype name: str + :ivar target: Application port inside the container. + :vartype target: int + :ivar published: Port over which the application is exposed from container. + :vartype published: int + :ivar host_ip: Host IP over which the application is exposed from the container. + :vartype host_ip: str """ _attribute_map = { @@ -8619,42 +11628,13 @@ def __init__( self.token_type = token_type -class ScheduleActionBase(_serialization.Model): - """ScheduleActionBase. - - You probably want to use the sub-classes and not this class directly. 
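EncryptionProperty now points at the shared KeyVaultProperties model and adds the BYOK resource-id fields, while EncryptionUpdateProperties/EncryptionKeyVaultUpdateProperties cover key rotation on update. A sketch; the KeyVaultProperties field names are assumed from the model it replaces:

from azure.mgmt.machinelearningservices import models as _models

# Creation-time customer-managed-key settings.
encryption = _models.EncryptionProperty(
    status="Enabled",
    key_vault_properties=_models.KeyVaultProperties(  # field names assumed
        key_vault_arm_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<kv>",
        key_identifier="https://<kv>.vault.azure.net/keys/<key>/<version>",
    ),
)

# Rotating the key later only requires the new key identifier.
rotation = _models.EncryptionUpdateProperties(
    key_vault_properties=_models.EncryptionKeyVaultUpdateProperties(
        key_identifier="https://<kv>.vault.azure.net/keys/<key>/<new-version>",
    )
)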
Known sub-classes are: - JobScheduleAction, EndpointScheduleAction - - All required parameters must be populated in order to send to Azure. - - :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values - are: "CreateJob" and "InvokeBatchEndpoint". - :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - """ - - _validation = { - "action_type": {"required": True}, - } - - _attribute_map = { - "action_type": {"key": "actionType", "type": "str"}, - } - - _subtype_map = {"action_type": {"CreateJob": "JobScheduleAction", "InvokeBatchEndpoint": "EndpointScheduleAction"}} - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.action_type: Optional[str] = None - - class EndpointScheduleAction(ScheduleActionBase): """EndpointScheduleAction. All required parameters must be populated in order to send to Azure. :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values - are: "CreateJob" and "InvokeBatchEndpoint". + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType :ivar endpoint_invocation_definition: [Required] Defines Schedule action definition details. @@ -8835,7 +11815,7 @@ def __init__( class EnvironmentVariable(_serialization.Model): - """Environment Variables for the container. + """EnvironmentVariable. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. @@ -8935,9 +11915,13 @@ class EnvironmentVersionProperties(AssetBase): # pylint: disable=too-many-insta :vartype properties: dict[str, str] :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar is_anonymous: If the name version are system generated (anonymous registration). + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived?. + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :vartype is_archived: bool :ivar auto_rebuild: Defines if image needs to be rebuilt based on base image changes. Known values are: "Disabled" and "OnBaseImageUpdate". @@ -8976,6 +11960,9 @@ class EnvironmentVersionProperties(AssetBase): # pylint: disable=too-many-insta :ivar inference_config: Defines configuration specific to inference. :vartype inference_config: ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties + :ivar intellectual_property: Intellectual Property details. Used if environment is an + Intellectual Property. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar os_type: The OS type of the environment. Known values are: "Linux" and "Windows". :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType :ivar provisioning_state: Provisioning state for the environment version. 
Known values are: @@ -8995,6 +11982,7 @@ class EnvironmentVersionProperties(AssetBase): # pylint: disable=too-many-insta "description": {"key": "description", "type": "str"}, "properties": {"key": "properties", "type": "{str}"}, "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "is_anonymous": {"key": "isAnonymous", "type": "bool"}, "is_archived": {"key": "isArchived", "type": "bool"}, "auto_rebuild": {"key": "autoRebuild", "type": "str"}, @@ -9003,6 +11991,7 @@ class EnvironmentVersionProperties(AssetBase): # pylint: disable=too-many-insta "environment_type": {"key": "environmentType", "type": "str"}, "image": {"key": "image", "type": "str"}, "inference_config": {"key": "inferenceConfig", "type": "InferenceContainerProperties"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, "os_type": {"key": "osType", "type": "str"}, "provisioning_state": {"key": "provisioningState", "type": "str"}, "stage": {"key": "stage", "type": "str"}, @@ -9014,6 +12003,7 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, is_anonymous: bool = False, is_archived: bool = False, auto_rebuild: Optional[Union[str, "_models.AutoRebuildSetting"]] = None, @@ -9021,6 +12011,7 @@ def __init__( conda_file: Optional[str] = None, image: Optional[str] = None, inference_config: Optional["_models.InferenceContainerProperties"] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, os_type: Optional[Union[str, "_models.OperatingSystemType"]] = None, stage: Optional[str] = None, **kwargs: Any @@ -9032,9 +12023,13 @@ def __init__( :paramtype properties: dict[str, str] :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword is_anonymous: If the name version are system generated (anonymous registration). + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived?. + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :paramtype is_archived: bool :keyword auto_rebuild: Defines if image needs to be rebuilt based on base image changes. Known values are: "Disabled" and "OnBaseImageUpdate". @@ -9063,6 +12058,10 @@ def __init__( :keyword inference_config: Defines configuration specific to inference. :paramtype inference_config: ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties + :keyword intellectual_property: Intellectual Property details. Used if environment is an + Intellectual Property. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty :keyword os_type: The OS type of the environment. Known values are: "Linux" and "Windows". :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType :keyword stage: Stage in the environment lifecycle assigned to this environment. 
@@ -9072,6 +12071,7 @@ def __init__( description=description, properties=properties, tags=tags, + auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs @@ -9082,6 +12082,7 @@ def __init__( self.environment_type = None self.image = image self.inference_config = inference_config + self.intellectual_property = intellectual_property self.os_type = os_type self.provisioning_state = None self.stage = stage @@ -9321,1052 +12322,1358 @@ class ExternalFQDNResponse(_serialization.Model): """ExternalFQDNResponse. :ivar value: - :vartype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoints] + :vartype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointsPropertyBag] """ _attribute_map = { - "value": {"key": "value", "type": "[FQDNEndpoints]"}, + "value": {"key": "value", "type": "[FQDNEndpointsPropertyBag]"}, } - def __init__(self, *, value: Optional[List["_models.FQDNEndpoints"]] = None, **kwargs: Any) -> None: + def __init__(self, *, value: Optional[List["_models.FQDNEndpointsPropertyBag"]] = None, **kwargs: Any) -> None: """ :keyword value: - :paramtype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoints] + :paramtype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointsPropertyBag] """ super().__init__(**kwargs) self.value = value -class FeaturizationSettings(_serialization.Model): - """Featurization Configuration. +class Feature(Resource): + """Azure Resource Manager resource envelope. - :ivar dataset_language: Dataset language, useful for the text data. - :vartype dataset_language: str + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.FeatureProperties """ + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + _attribute_map = { - "dataset_language": {"key": "datasetLanguage", "type": "str"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeatureProperties"}, } - def __init__(self, *, dataset_language: Optional[str] = None, **kwargs: Any) -> None: + def __init__(self, *, properties: "_models.FeatureProperties", **kwargs: Any) -> None: """ - :keyword dataset_language: Dataset language, useful for the text data. - :paramtype dataset_language: str + :keyword properties: [Required] Additional attributes of the entity. Required. 
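EnvironmentVersionProperties gains auto_delete_setting and intellectual_property. A construction sketch; the EnvironmentVersion envelope, AutoDeleteSetting and IntellectualProperty models are referenced but not defined in this hunk, so their field names below are assumptions:

from azure.mgmt.machinelearningservices import models as _models

env_version = _models.EnvironmentVersion(
    properties=_models.EnvironmentVersionProperties(
        image="mcr.microsoft.com/azureml/curated/minimal-py38-inference:latest",
        stage="Development",
        auto_delete_setting=_models.AutoDeleteSetting(  # field names assumed
            condition="CreatedGreaterThan",
            value="P30D",
        ),
        intellectual_property=_models.IntellectualProperty(  # field names assumed
            publisher="contoso",
        ),
    )
)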
+ :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeatureProperties """ super().__init__(**kwargs) - self.dataset_language = dataset_language + self.properties = properties -class FlavorData(_serialization.Model): - """FlavorData. +class FeatureAttributionDriftMonitoringSignal(MonitoringSignalBase): + """FeatureAttributionDriftMonitoringSignal. - :ivar data: Model flavor-specific data. - :vartype data: dict[str, str] + All required parameters must be populated in order to send to Azure. + + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". + :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType + :ivar metric_threshold: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :vartype metric_threshold: + ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetricThreshold + :ivar production_data: [Required] The data which drift will be calculated for. Required. + :vartype production_data: + list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] + :ivar reference_data: [Required] The data to calculate drift against. Required. + :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ + _validation = { + "signal_type": {"required": True}, + "metric_threshold": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, + } + _attribute_map = { - "data": {"key": "data", "type": "{str}"}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_threshold": {"key": "metricThreshold", "type": "FeatureAttributionMetricThreshold"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } - def __init__(self, *, data: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: - """ - :keyword data: Model flavor-specific data. - :paramtype data: dict[str, str] + def __init__( + self, + *, + metric_threshold: "_models.FeatureAttributionMetricThreshold", + production_data: List["_models.MonitoringInputDataBase"], + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> None: """ - super().__init__(**kwargs) - self.data = data - - -class Forecasting(TableVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes - """Forecasting task in AutoML Table vertical. + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. 
+ :paramtype properties: dict[str, str] + :keyword metric_threshold: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :paramtype metric_threshold: + ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetricThreshold + :keyword production_data: [Required] The data which drift will be calculated for. Required. + :paramtype production_data: + list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] + :keyword reference_data: [Required] The data to calculate drift against. Required. + :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + """ + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "FeatureAttributionDrift" + self.metric_threshold = metric_threshold + self.production_data = production_data + self.reference_data = reference_data + + +class FeatureAttributionMetricThreshold(_serialization.Model): + """FeatureAttributionMetricThreshold. All required parameters must be populated in order to send to Azure. - :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", - "Warning", "Error", and "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: - "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", and "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: [Required] Training data input. Required. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar cv_split_column_names: Columns to use for CVSplit data. - :vartype cv_split_column_names: list[str] - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset - when validation dataset is not provided. - :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :ivar test_data: Test data input. - :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype test_data_size: float - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar weight_column_name: The name of the sample weight column. 
Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. - :vartype weight_column_name: str - :ivar forecasting_settings: Forecasting task specific inputs. - :vartype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings - :ivar primary_metric: Primary metric for forecasting task. Known values are: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and - "NormalizedMeanAbsoluteError". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics - :ivar training_settings: Inputs for training phase for an AutoML Job. - :vartype training_settings: - ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings + :ivar metric: [Required] The feature attribution metric to calculate. Required. + "NormalizedDiscountedCumulativeGain" + :vartype metric: str or ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetric + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ _validation = { - "task_type": {"required": True}, - "training_data": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - "log_verbosity": {"key": "logVerbosity", "type": "str"}, - "target_column_name": {"key": "targetColumnName", "type": "str"}, - "task_type": {"key": "taskType", "type": "str"}, - "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, - "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, - "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, - "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, - "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, - "test_data": {"key": "testData", "type": "MLTableJobInput"}, - "test_data_size": {"key": "testDataSize", "type": "float"}, - "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, - "validation_data_size": {"key": "validationDataSize", "type": "float"}, - "weight_column_name": {"key": "weightColumnName", "type": "str"}, - "forecasting_settings": {"key": "forecastingSettings", "type": "ForecastingSettings"}, - "primary_metric": {"key": "primaryMetric", "type": "str"}, - "training_settings": {"key": "trainingSettings", "type": "ForecastingTrainingSettings"}, + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } def __init__( self, *, - training_data: "_models.MLTableJobInput", - log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, - target_column_name: Optional[str] = None, - cv_split_column_names: Optional[List[str]] = None, - featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, - limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, - n_cross_validations: Optional["_models.NCrossValidations"] = None, - test_data: Optional["_models.MLTableJobInput"] = None, - test_data_size: Optional[float] = None, - validation_data: Optional["_models.MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - weight_column_name: Optional[str] = None, - forecasting_settings: Optional["_models.ForecastingSettings"] = None, - primary_metric: Optional[Union[str, "_models.ForecastingPrimaryMetrics"]] = None, - training_settings: 
Optional["_models.ForecastingTrainingSettings"] = None, + metric: Union[str, "_models.FeatureAttributionMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any ) -> None: """ - :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", - "Warning", "Error", and "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: [Required] Training data input. Required. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword cv_split_column_names: Columns to use for CVSplit data. - :paramtype cv_split_column_names: list[str] - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :keyword limit_settings: Execution constraints for AutoMLJob. - :paramtype limit_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :keyword n_cross_validations: Number of cross validation folds to be applied on training - dataset - when validation dataset is not provided. - :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :keyword test_data: Test data input. - :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype test_data_size: float - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword weight_column_name: The name of the sample weight column. Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. - :paramtype weight_column_name: str - :keyword forecasting_settings: Forecasting task specific inputs. - :paramtype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings - :keyword primary_metric: Primary metric for forecasting task. Known values are: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and - "NormalizedMeanAbsoluteError". - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics - :keyword training_settings: Inputs for training phase for an AutoML Job. - :paramtype training_settings: - ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings + :keyword metric: [Required] The feature attribution metric to calculate. Required. + "NormalizedDiscountedCumulativeGain" + :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetric + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. 
+ :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super().__init__( - cv_split_column_names=cv_split_column_names, - featurization_settings=featurization_settings, - limit_settings=limit_settings, - n_cross_validations=n_cross_validations, - test_data=test_data, - test_data_size=test_data_size, - validation_data=validation_data, - validation_data_size=validation_data_size, - weight_column_name=weight_column_name, - log_verbosity=log_verbosity, - target_column_name=target_column_name, - training_data=training_data, - **kwargs - ) - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.task_type: str = "Forecasting" - self.training_data = training_data - self.forecasting_settings = forecasting_settings - self.primary_metric = primary_metric - self.training_settings = training_settings - self.cv_split_column_names = cv_split_column_names - self.featurization_settings = featurization_settings - self.limit_settings = limit_settings - self.n_cross_validations = n_cross_validations - self.test_data = test_data - self.test_data_size = test_data_size - self.validation_data = validation_data - self.validation_data_size = validation_data_size - self.weight_column_name = weight_column_name + super().__init__(**kwargs) + self.metric = metric + self.threshold = threshold -class ForecastingSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes - """Forecasting specific parameters. +class FeatureProperties(ResourceBase): + """Dto object representing feature. - :ivar country_or_region_for_holidays: Country or region for holidays for forecasting tasks. - These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. - :vartype country_or_region_for_holidays: str - :ivar cv_step_size: Number of periods between the origin time of one CV fold and the next fold. - For - example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be - three days apart. - :vartype cv_step_size: int - :ivar feature_lags: Flag for generating lags for the numeric features with 'auto' or null. - Known values are: "None" and "Auto". - :vartype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags - :ivar forecast_horizon: The desired maximum forecast horizon in units of time-series frequency. - :vartype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon - :ivar frequency: When forecasting, this parameter represents the period with which the forecast - is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset frequency - by default. - :vartype frequency: str - :ivar seasonality: Set time series seasonality as an integer multiple of the series frequency. - If seasonality is set to 'auto', it will be inferred. - :vartype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality - :ivar short_series_handling_config: The parameter defining how if AutoML should handle short - time series. Known values are: "None", "Auto", "Pad", and "Drop". - :vartype short_series_handling_config: str or - ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration - :ivar target_aggregate_function: The function to be used to aggregate the time series target - column to conform to a user specified frequency. - If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the - error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean". 
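FeatureAttributionDriftMonitoringSignal pairs a single FeatureAttributionMetricThreshold with production and reference inputs. A sketch; the concrete MonitoringInputDataBase subtypes are defined elsewhere in this module, so a placeholder helper stands in for their construction:

from azure.mgmt.machinelearningservices import models as _models

def build_input_data(context: str) -> "_models.MonitoringInputDataBase":
    # Placeholder: in a real monitor definition, return one of the concrete
    # MonitoringInputDataBase subtypes for the given data context.
    raise NotImplementedError(context)

threshold = _models.FeatureAttributionMetricThreshold(
    metric="NormalizedDiscountedCumulativeGain",
    threshold=_models.MonitoringThreshold(value=0.9),  # 'value' field name assumed
)

signal = _models.FeatureAttributionDriftMonitoringSignal(
    mode="Enabled",
    metric_threshold=threshold,
    production_data=[build_input_data("production")],
    reference_data=build_input_data("reference"),
)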
- Known values are: "None", "Sum", "Max", "Min", and "Mean". - :vartype target_aggregate_function: str or - ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction - :ivar target_lags: The number of past periods to lag from the target column. - :vartype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags - :ivar target_rolling_window_size: The number of past periods used to create a rolling window - average of the target column. - :vartype target_rolling_window_size: - ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize - :ivar time_column_name: The name of the time column. This parameter is required when - forecasting to specify the datetime column in the input data used for building the time series - and inferring its frequency. - :vartype time_column_name: str - :ivar time_series_id_column_names: The names of columns used to group a timeseries. It can be - used to create multiple series. - If grain is not defined, the data set is assumed to be one time-series. This parameter is used - with task type forecasting. - :vartype time_series_id_column_names: list[str] - :ivar use_stl: Configure STL Decomposition of the time-series target column. Known values are: - "None", "Season", and "SeasonTrend". - :vartype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar data_type: Specifies type. Known values are: "String", "Integer", "Long", "Float", + "Double", "Binary", "Datetime", and "Boolean". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType + :ivar feature_name: Specifies name. 
+ :vartype feature_name: str """ _attribute_map = { - "country_or_region_for_holidays": {"key": "countryOrRegionForHolidays", "type": "str"}, - "cv_step_size": {"key": "cvStepSize", "type": "int"}, - "feature_lags": {"key": "featureLags", "type": "str"}, - "forecast_horizon": {"key": "forecastHorizon", "type": "ForecastHorizon"}, - "frequency": {"key": "frequency", "type": "str"}, - "seasonality": {"key": "seasonality", "type": "Seasonality"}, - "short_series_handling_config": {"key": "shortSeriesHandlingConfig", "type": "str"}, - "target_aggregate_function": {"key": "targetAggregateFunction", "type": "str"}, - "target_lags": {"key": "targetLags", "type": "TargetLags"}, - "target_rolling_window_size": {"key": "targetRollingWindowSize", "type": "TargetRollingWindowSize"}, - "time_column_name": {"key": "timeColumnName", "type": "str"}, - "time_series_id_column_names": {"key": "timeSeriesIdColumnNames", "type": "[str]"}, - "use_stl": {"key": "useStl", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "data_type": {"key": "dataType", "type": "str"}, + "feature_name": {"key": "featureName", "type": "str"}, } def __init__( self, *, - country_or_region_for_holidays: Optional[str] = None, - cv_step_size: Optional[int] = None, - feature_lags: Optional[Union[str, "_models.FeatureLags"]] = None, - forecast_horizon: Optional["_models.ForecastHorizon"] = None, - frequency: Optional[str] = None, - seasonality: Optional["_models.Seasonality"] = None, - short_series_handling_config: Optional[Union[str, "_models.ShortSeriesHandlingConfiguration"]] = None, - target_aggregate_function: Optional[Union[str, "_models.TargetAggregationFunction"]] = None, - target_lags: Optional["_models.TargetLags"] = None, - target_rolling_window_size: Optional["_models.TargetRollingWindowSize"] = None, - time_column_name: Optional[str] = None, - time_series_id_column_names: Optional[List[str]] = None, - use_stl: Optional[Union[str, "_models.UseStl"]] = None, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + data_type: Optional[Union[str, "_models.FeatureDataType"]] = None, + feature_name: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword country_or_region_for_holidays: Country or region for holidays for forecasting tasks. - These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. - :paramtype country_or_region_for_holidays: str - :keyword cv_step_size: Number of periods between the origin time of one CV fold and the next - fold. For - example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be - three days apart. - :paramtype cv_step_size: int - :keyword feature_lags: Flag for generating lags for the numeric features with 'auto' or null. - Known values are: "None" and "Auto". - :paramtype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags - :keyword forecast_horizon: The desired maximum forecast horizon in units of time-series - frequency. - :paramtype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon - :keyword frequency: When forecasting, this parameter represents the period with which the - forecast is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset - frequency by default. 
- :paramtype frequency: str - :keyword seasonality: Set time series seasonality as an integer multiple of the series - frequency. - If seasonality is set to 'auto', it will be inferred. - :paramtype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality - :keyword short_series_handling_config: The parameter defining how if AutoML should handle short - time series. Known values are: "None", "Auto", "Pad", and "Drop". - :paramtype short_series_handling_config: str or - ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration - :keyword target_aggregate_function: The function to be used to aggregate the time series target - column to conform to a user specified frequency. - If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the - error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean". - Known values are: "None", "Sum", "Max", "Min", and "Mean". - :paramtype target_aggregate_function: str or - ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction - :keyword target_lags: The number of past periods to lag from the target column. - :paramtype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags - :keyword target_rolling_window_size: The number of past periods used to create a rolling window - average of the target column. - :paramtype target_rolling_window_size: - ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize - :keyword time_column_name: The name of the time column. This parameter is required when - forecasting to specify the datetime column in the input data used for building the time series - and inferring its frequency. - :paramtype time_column_name: str - :keyword time_series_id_column_names: The names of columns used to group a timeseries. It can - be used to create multiple series. - If grain is not defined, the data set is assumed to be one time-series. This parameter is used - with task type forecasting. - :paramtype time_series_id_column_names: list[str] - :keyword use_stl: Configure STL Decomposition of the time-series target column. Known values - are: "None", "Season", and "SeasonTrend". - :paramtype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword data_type: Specifies type. Known values are: "String", "Integer", "Long", "Float", + "Double", "Binary", "Datetime", and "Boolean". + :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType + :keyword feature_name: Specifies name. 
+ :paramtype feature_name: str """ - super().__init__(**kwargs) - self.country_or_region_for_holidays = country_or_region_for_holidays - self.cv_step_size = cv_step_size - self.feature_lags = feature_lags - self.forecast_horizon = forecast_horizon - self.frequency = frequency - self.seasonality = seasonality - self.short_series_handling_config = short_series_handling_config - self.target_aggregate_function = target_aggregate_function - self.target_lags = target_lags - self.target_rolling_window_size = target_rolling_window_size - self.time_column_name = time_column_name - self.time_series_id_column_names = time_series_id_column_names - self.use_stl = use_stl + super().__init__(description=description, properties=properties, tags=tags, **kwargs) + self.data_type = data_type + self.feature_name = feature_name -class ForecastingTrainingSettings(TrainingSettings): - """Forecasting Training related configuration. +class FeatureResourceArmPaginatedResult(_serialization.Model): + """A paginated list of Feature entities. - :ivar enable_dnn_training: Enable recommendation of DNN models. - :vartype enable_dnn_training: bool - :ivar enable_model_explainability: Flag to turn on explainability on best model. - :vartype enable_model_explainability: bool - :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :vartype enable_onnx_compatible_models: bool - :ivar enable_stack_ensemble: Enable stack ensemble run. - :vartype enable_stack_ensemble: bool - :ivar enable_vote_ensemble: Enable voting ensemble run. - :vartype enable_vote_ensemble: bool - :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. - :vartype ensemble_model_download_timeout: ~datetime.timedelta - :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. - :vartype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :ivar allowed_training_algorithms: Allowed models for forecasting task. - :vartype allowed_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ForecastingModels] - :ivar blocked_training_algorithms: Blocked models for forecasting task. - :vartype blocked_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ForecastingModels] + :ivar next_link: The link to the next page of Feature objects. If null, there are no additional + pages. + :vartype next_link: str + :ivar value: An array of objects of type Feature. 
+ :vartype value: list[~azure.mgmt.machinelearningservices.models.Feature] """ _attribute_map = { - "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, - "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, - "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, - "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, - "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, - "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, - "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, - "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, - "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Feature]"}, } def __init__( - self, - *, - enable_dnn_training: bool = False, - enable_model_explainability: bool = True, - enable_onnx_compatible_models: bool = False, - enable_stack_ensemble: bool = True, - enable_vote_ensemble: bool = True, - ensemble_model_download_timeout: datetime.timedelta = "PT5M", - stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, - allowed_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, - blocked_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, - **kwargs: Any + self, *, next_link: Optional[str] = None, value: Optional[List["_models.Feature"]] = None, **kwargs: Any ) -> None: """ - :keyword enable_dnn_training: Enable recommendation of DNN models. - :paramtype enable_dnn_training: bool - :keyword enable_model_explainability: Flag to turn on explainability on best model. - :paramtype enable_model_explainability: bool - :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :paramtype enable_onnx_compatible_models: bool - :keyword enable_stack_ensemble: Enable stack ensemble run. - :paramtype enable_stack_ensemble: bool - :keyword enable_vote_ensemble: Enable voting ensemble run. - :paramtype enable_vote_ensemble: bool - :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. - :paramtype ensemble_model_download_timeout: ~datetime.timedelta - :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. - :paramtype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :keyword allowed_training_algorithms: Allowed models for forecasting task. - :paramtype allowed_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ForecastingModels] - :keyword blocked_training_algorithms: Blocked models for forecasting task. - :paramtype blocked_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ForecastingModels] + :keyword next_link: The link to the next page of Feature objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type Feature. 
+ :paramtype value: list[~azure.mgmt.machinelearningservices.models.Feature] """ - super().__init__( - enable_dnn_training=enable_dnn_training, - enable_model_explainability=enable_model_explainability, - enable_onnx_compatible_models=enable_onnx_compatible_models, - enable_stack_ensemble=enable_stack_ensemble, - enable_vote_ensemble=enable_vote_ensemble, - ensemble_model_download_timeout=ensemble_model_download_timeout, - stack_ensemble_settings=stack_ensemble_settings, - **kwargs - ) - self.allowed_training_algorithms = allowed_training_algorithms - self.blocked_training_algorithms = blocked_training_algorithms + super().__init__(**kwargs) + self.next_link = next_link + self.value = value -class FQDNEndpoint(_serialization.Model): - """FQDNEndpoint. +class FeaturesetContainer(Resource): + """Azure Resource Manager resource envelope. - :ivar domain_name: - :vartype domain_name: str - :ivar endpoint_details: - :vartype endpoint_details: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetContainerProperties """ + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + _attribute_map = { - "domain_name": {"key": "domainName", "type": "str"}, - "endpoint_details": {"key": "endpointDetails", "type": "[FQDNEndpointDetail]"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturesetContainerProperties"}, } - def __init__( - self, - *, - domain_name: Optional[str] = None, - endpoint_details: Optional[List["_models.FQDNEndpointDetail"]] = None, - **kwargs: Any - ) -> None: + def __init__(self, *, properties: "_models.FeaturesetContainerProperties", **kwargs: Any) -> None: """ - :keyword domain_name: - :paramtype domain_name: str - :keyword endpoint_details: - :paramtype endpoint_details: - list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetContainerProperties """ super().__init__(**kwargs) - self.domain_name = domain_name - self.endpoint_details = endpoint_details + self.properties = properties -class FQDNEndpointDetail(_serialization.Model): - """FQDNEndpointDetail. +class FeaturesetContainerProperties(AssetContainer): + """Dto object representing feature set. 
- :ivar port: - :vartype port: int + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + :ivar provisioning_state: Provisioning state for the featureset container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ + _validation = { + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + _attribute_map = { - "port": {"key": "port", "type": "int"}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__(self, *, port: Optional[int] = None, **kwargs: Any) -> None: + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: bool = False, + **kwargs: Any + ) -> None: """ - :keyword port: - :paramtype port: int + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool """ - super().__init__(**kwargs) - self.port = port + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + self.provisioning_state = None -class FQDNEndpoints(_serialization.Model): - """FQDNEndpoints. +class FeaturesetContainerResourceArmPaginatedResult(_serialization.Model): + """A paginated list of FeaturesetContainer entities. - :ivar properties: - :vartype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpointsProperties + :ivar next_link: The link to the next page of FeaturesetContainer objects. If null, there are + no additional pages. + :vartype next_link: str + :ivar value: An array of objects of type FeaturesetContainer. 
+ :vartype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] """ _attribute_map = { - "properties": {"key": "properties", "type": "FQDNEndpointsProperties"}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturesetContainer]"}, } - def __init__(self, *, properties: Optional["_models.FQDNEndpointsProperties"] = None, **kwargs: Any) -> None: + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.FeaturesetContainer"]] = None, + **kwargs: Any + ) -> None: """ - :keyword properties: - :paramtype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpointsProperties + :keyword next_link: The link to the next page of FeaturesetContainer objects. If null, there + are no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type FeaturesetContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] """ super().__init__(**kwargs) - self.properties = properties + self.next_link = next_link + self.value = value -class FQDNEndpointsProperties(_serialization.Model): - """FQDNEndpointsProperties. +class FeaturesetSpecification(_serialization.Model): + """Dto object representing specification. - :ivar category: - :vartype category: str - :ivar endpoints: - :vartype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] + :ivar path: Specifies the spec path. + :vartype path: str """ _attribute_map = { - "category": {"key": "category", "type": "str"}, - "endpoints": {"key": "endpoints", "type": "[FQDNEndpoint]"}, + "path": {"key": "path", "type": "str"}, } - def __init__( - self, *, category: Optional[str] = None, endpoints: Optional[List["_models.FQDNEndpoint"]] = None, **kwargs: Any - ) -> None: + def __init__(self, *, path: Optional[str] = None, **kwargs: Any) -> None: """ - :keyword category: - :paramtype category: str - :keyword endpoints: - :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] + :keyword path: Specifies the spec path. + :paramtype path: str """ super().__init__(**kwargs) - self.category = category - self.endpoints = endpoints + self.path = path -class GridSamplingAlgorithm(SamplingAlgorithm): - """Defines a Sampling Algorithm that exhaustively generates every value combination in the space. +class FeaturesetVersion(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter - values, along with configuration properties. Required. Known values are: "Grid", "Random", and - "Bayesian". - :vartype sampling_algorithm_type: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. 
+ :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData
+ :ivar properties: [Required] Additional attributes of the entity. Required.
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionProperties
 """

 _validation = {
- "sampling_algorithm_type": {"required": True},
+ "id": {"readonly": True},
+ "name": {"readonly": True},
+ "type": {"readonly": True},
+ "system_data": {"readonly": True},
+ "properties": {"required": True},
 }

 _attribute_map = {
- "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemData"},
+ "properties": {"key": "properties", "type": "FeaturesetVersionProperties"},
 }

- def __init__(self, **kwargs: Any) -> None:
- """ """
+ def __init__(self, *, properties: "_models.FeaturesetVersionProperties", **kwargs: Any) -> None:
+ """
+ :keyword properties: [Required] Additional attributes of the entity. Required.
+ :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionProperties
+ """
 super().__init__(**kwargs)
- self.sampling_algorithm_type: str = "Grid"
+ self.properties = properties


-class HDInsightSchema(_serialization.Model):
- """HDInsightSchema.
+class FeaturesetVersionBackfillRequest(_serialization.Model):
+ """Request payload for creating a backfill request for a given feature set version.

- :ivar properties: HDInsight compute properties.
- :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties
+ :ivar data_availability_status: Specifies the data availability status that you want to
+ backfill.
+ :vartype data_availability_status: list[str or
+ ~azure.mgmt.machinelearningservices.models.DataAvailabilityStatus]
+ :ivar description: Specifies description.
+ :vartype description: str
+ :ivar display_name: Specifies the display name.
+ :vartype display_name: str
+ :ivar feature_window: Specifies the backfill feature window to be materialized.
+ :vartype feature_window: ~azure.mgmt.machinelearningservices.models.FeatureWindow
+ :ivar job_id: Specify the jobId to retry the failed materialization.
+ :vartype job_id: str
+ :ivar properties: Specifies the properties.
+ :vartype properties: dict[str, str]
+ :ivar resource: Specifies the compute resource settings.
+ :vartype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource
+ :ivar spark_configuration: Specifies the spark compute settings.
+ :vartype spark_configuration: dict[str, str]
+ :ivar tags: Specifies the tags.
+ :vartype tags: dict[str, str]
 """

 _attribute_map = {
- "properties": {"key": "properties", "type": "HDInsightProperties"},
+ "data_availability_status": {"key": "dataAvailabilityStatus", "type": "[str]"},
+ "description": {"key": "description", "type": "str"},
+ "display_name": {"key": "displayName", "type": "str"},
+ "feature_window": {"key": "featureWindow", "type": "FeatureWindow"},
+ "job_id": {"key": "jobId", "type": "str"},
+ "properties": {"key": "properties", "type": "{str}"},
+ "resource": {"key": "resource", "type": "MaterializationComputeResource"},
+ "spark_configuration": {"key": "sparkConfiguration", "type": "{str}"},
+ "tags": {"key": "tags", "type": "{str}"},
 }

- def __init__(self, *, properties: Optional["_models.HDInsightProperties"] = None, **kwargs: Any) -> None:
+ def __init__(
+ self,
+ *,
+ data_availability_status: Optional[List[Union[str, "_models.DataAvailabilityStatus"]]] = None,
+ description: Optional[str] = None,
+ display_name: Optional[str] = None,
+ feature_window: Optional["_models.FeatureWindow"] = None,
+ job_id: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ resource: Optional["_models.MaterializationComputeResource"] = None,
+ spark_configuration: Optional[Dict[str, str]] = None,
+ tags: Optional[Dict[str, str]] = None,
+ **kwargs: Any
+ ) -> None:
 """
- :keyword properties: HDInsight compute properties.
- :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties
+ :keyword data_availability_status: Specifies the data availability status that you want to
+ backfill.
+ :paramtype data_availability_status: list[str or
+ ~azure.mgmt.machinelearningservices.models.DataAvailabilityStatus]
+ :keyword description: Specifies description.
+ :paramtype description: str
+ :keyword display_name: Specifies the display name.
+ :paramtype display_name: str
+ :keyword feature_window: Specifies the backfill feature window to be materialized.
+ :paramtype feature_window: ~azure.mgmt.machinelearningservices.models.FeatureWindow
+ :keyword job_id: Specify the jobId to retry the failed materialization.
+ :paramtype job_id: str
+ :keyword properties: Specifies the properties.
+ :paramtype properties: dict[str, str]
+ :keyword resource: Specifies the compute resource settings.
+ :paramtype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource
+ :keyword spark_configuration: Specifies the spark compute settings.
+ :paramtype spark_configuration: dict[str, str]
+ :keyword tags: Specifies the tags.
+ :paramtype tags: dict[str, str]
 """
 super().__init__(**kwargs)
- self.properties = properties
+ self.data_availability_status = data_availability_status
+ self.description = description
+ self.display_name = display_name
+ self.feature_window = feature_window
+ self.job_id = job_id
+ self.properties = properties
+ self.resource = resource
+ self.spark_configuration = spark_configuration
+ self.tags = tags


-class HDInsight(Compute, HDInsightSchema): # pylint: disable=too-many-instance-attributes
- """A HDInsight compute.
+class FeaturesetVersionBackfillResponse(_serialization.Model):
+ """Response payload for creating a backfill request for a given feature set version.

- Variables are only populated by the server, and will be ignored when sending a request.
+ :ivar job_ids: List of jobs submitted as part of the backfill request.
+ :vartype job_ids: list[str]
+ """

- All required parameters must be populated in order to send to Azure.
+ _attribute_map = { + "job_ids": {"key": "jobIds", "type": "[str]"}, + } - :ivar properties: HDInsight compute properties. - :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties - :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", - "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", - "DataLakeAnalytics", and "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. + def __init__(self, *, job_ids: Optional[List[str]] = None, **kwargs: Any) -> None: + """ + :keyword job_ids: List of jobs submitted as part of the backfill request. + :paramtype job_ids: list[str] + """ + super().__init__(**kwargs) + self.job_ids = job_ids + + +class FeaturesetVersionProperties(AssetBase): # pylint: disable=too-many-instance-attributes + """Dto object representing feature set version. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :vartype is_archived: bool + :ivar entities: Specifies list of entities. + :vartype entities: list[str] + :ivar materialization_settings: Specifies the materialization settings. 
+ :vartype materialization_settings: + ~azure.mgmt.machinelearningservices.models.MaterializationSettings + :ivar provisioning_state: Provisioning state for the featureset version container. Known values + are: "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + :ivar specification: Specifies the feature spec details. + :vartype specification: ~azure.mgmt.machinelearningservices.models.FeaturesetSpecification + :ivar stage: Specifies the asset stage. + :vartype stage: str """ _validation = { - "compute_type": {"required": True}, "provisioning_state": {"readonly": True}, - "created_on": {"readonly": True}, - "modified_on": {"readonly": True}, - "provisioning_errors": {"readonly": True}, - "is_attached_compute": {"readonly": True}, } _attribute_map = { - "properties": {"key": "properties", "type": "HDInsightProperties"}, - "compute_type": {"key": "computeType", "type": "str"}, - "compute_location": {"key": "computeLocation", "type": "str"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, "description": {"key": "description", "type": "str"}, - "created_on": {"key": "createdOn", "type": "iso-8601"}, - "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, - "resource_id": {"key": "resourceId", "type": "str"}, - "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, - "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, - "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "entities": {"key": "entities", "type": "[str]"}, + "materialization_settings": {"key": "materializationSettings", "type": "MaterializationSettings"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "specification": {"key": "specification", "type": "FeaturesetSpecification"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( self, *, - properties: Optional["_models.HDInsightProperties"] = None, - compute_location: Optional[str] = None, description: Optional[str] = None, - resource_id: Optional[str] = None, - disable_local_auth: Optional[bool] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + entities: Optional[List[str]] = None, + materialization_settings: Optional["_models.MaterializationSettings"] = None, + specification: Optional["_models.FeaturesetSpecification"] = None, + stage: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword properties: HDInsight compute properties. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. + :keyword description: The asset description text. :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. 
- :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :paramtype is_archived: bool + :keyword entities: Specifies list of entities. + :paramtype entities: list[str] + :keyword materialization_settings: Specifies the materialization settings. + :paramtype materialization_settings: + ~azure.mgmt.machinelearningservices.models.MaterializationSettings + :keyword specification: Specifies the feature spec details. + :paramtype specification: ~azure.mgmt.machinelearningservices.models.FeaturesetSpecification + :keyword stage: Specifies the asset stage. + :paramtype stage: str """ super().__init__( - compute_location=compute_location, description=description, - resource_id=resource_id, - disable_local_auth=disable_local_auth, properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, **kwargs ) - self.properties = properties - self.compute_type: str = "HDInsight" - self.compute_location = compute_location + self.entities = entities + self.materialization_settings = materialization_settings self.provisioning_state = None - self.description = description - self.created_on = None - self.modified_on = None - self.resource_id = resource_id - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = disable_local_auth + self.specification = specification + self.stage = stage -class HDInsightProperties(_serialization.Model): - """HDInsight compute properties. +class FeaturesetVersionResourceArmPaginatedResult(_serialization.Model): + """A paginated list of FeaturesetVersion entities. - :ivar ssh_port: Port open for ssh connections on the master node of the cluster. - :vartype ssh_port: int - :ivar address: Public IP address of the master node of the cluster. - :vartype address: str - :ivar administrator_account: Admin credentials for master node of the cluster. - :vartype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :ivar next_link: The link to the next page of FeaturesetVersion objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type FeaturesetVersion. 
+ :vartype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] """ _attribute_map = { - "ssh_port": {"key": "sshPort", "type": "int"}, - "address": {"key": "address", "type": "str"}, - "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturesetVersion]"}, } def __init__( self, *, - ssh_port: Optional[int] = None, - address: Optional[str] = None, - administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, + next_link: Optional[str] = None, + value: Optional[List["_models.FeaturesetVersion"]] = None, **kwargs: Any ) -> None: """ - :keyword ssh_port: Port open for ssh connections on the master node of the cluster. - :paramtype ssh_port: int - :keyword address: Public IP address of the master node of the cluster. - :paramtype address: str - :keyword administrator_account: Admin credentials for master node of the cluster. - :paramtype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :keyword next_link: The link to the next page of FeaturesetVersion objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type FeaturesetVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] """ super().__init__(**kwargs) - self.ssh_port = ssh_port - self.address = address - self.administrator_account = administrator_account + self.next_link = next_link + self.value = value -class IdAssetReference(AssetReferenceBase): - """Reference to an asset via its ARM resource ID. +class FeaturestoreEntityContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values - are: "Id", "DataPath", and "OutputPath". - :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType - :ivar asset_id: [Required] ARM resource ID of the asset. Required. - :vartype asset_id: str + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. 
+ :vartype properties: + ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerProperties """ _validation = { - "reference_type": {"required": True}, - "asset_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - "reference_type": {"key": "referenceType", "type": "str"}, - "asset_id": {"key": "assetId", "type": "str"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturestoreEntityContainerProperties"}, } - def __init__(self, *, asset_id: str, **kwargs: Any) -> None: + def __init__(self, *, properties: "_models.FeaturestoreEntityContainerProperties", **kwargs: Any) -> None: """ - :keyword asset_id: [Required] ARM resource ID of the asset. Required. - :paramtype asset_id: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerProperties """ super().__init__(**kwargs) - self.reference_type: str = "Id" - self.asset_id = asset_id + self.properties = properties -class IdentityForCmk(_serialization.Model): - """Identity that will be used to access key vault for encryption at rest. +class FeaturestoreEntityContainerProperties(AssetContainer): + """Dto object representing feature entity. - :ivar user_assigned_identity: The ArmId of the user assigned identity that will be used to - access the customer managed key vault. - :vartype user_assigned_identity: str + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + :ivar provisioning_state: Provisioning state for the featurestore entity container. Known + values are: "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
+ :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ + _validation = { + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + _attribute_map = { - "user_assigned_identity": {"key": "userAssignedIdentity", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__(self, *, user_assigned_identity: Optional[str] = None, **kwargs: Any) -> None: + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: bool = False, + **kwargs: Any + ) -> None: """ - :keyword user_assigned_identity: The ArmId of the user assigned identity that will be used to - access the customer managed key vault. - :paramtype user_assigned_identity: str + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool """ - super().__init__(**kwargs) - self.user_assigned_identity = user_assigned_identity + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + self.provisioning_state = None -class IdleShutdownSetting(_serialization.Model): - """Stops compute instance after user defined period of inactivity. +class FeaturestoreEntityContainerResourceArmPaginatedResult(_serialization.Model): + """A paginated list of FeaturestoreEntityContainer entities. - :ivar idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, maximum - is 3 days. - :vartype idle_time_before_shutdown: str + :ivar next_link: The link to the next page of FeaturestoreEntityContainer objects. If null, + there are no additional pages. + :vartype next_link: str + :ivar value: An array of objects of type FeaturestoreEntityContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] """ _attribute_map = { - "idle_time_before_shutdown": {"key": "idleTimeBeforeShutdown", "type": "str"}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturestoreEntityContainer]"}, } - def __init__(self, *, idle_time_before_shutdown: Optional[str] = None, **kwargs: Any) -> None: + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.FeaturestoreEntityContainer"]] = None, + **kwargs: Any + ) -> None: """ - :keyword idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, - maximum is 3 days. - :paramtype idle_time_before_shutdown: str + :keyword next_link: The link to the next page of FeaturestoreEntityContainer objects. If null, + there are no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type FeaturestoreEntityContainer. 
+ :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] """ super().__init__(**kwargs) - self.idle_time_before_shutdown = idle_time_before_shutdown + self.next_link = next_link + self.value = value -class Image(_serialization.Model): - """Describes the Image Specifications. +class FeaturestoreEntityVersion(Resource): + """Azure Resource Manager resource envelope. - :ivar additional_properties: Unmatched properties from the message are deserialized to this - collection. - :vartype additional_properties: dict[str, any] - :ivar type: Type of the image. Possible values are: docker - For docker images. azureml - For - AzureML images. Known values are: "docker" and "azureml". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.ImageType - :ivar reference: Image reference. - :vartype reference: str + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: + ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersionProperties """ + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + _attribute_map = { - "additional_properties": {"key": "", "type": "{object}"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "reference": {"key": "reference", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturestoreEntityVersionProperties"}, + } + + def __init__(self, *, properties: "_models.FeaturestoreEntityVersionProperties", **kwargs: Any) -> None: + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersionProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class FeaturestoreEntityVersionProperties(AssetBase): + """Dto object representing feature entity version. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). 
For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :vartype is_archived: bool + :ivar index_columns: Specifies index columns. + :vartype index_columns: list[~azure.mgmt.machinelearningservices.models.IndexColumn] + :ivar provisioning_state: Provisioning state for the featurestore entity version. Known values + are: "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + :ivar stage: Specifies the asset stage. + :vartype stage: str + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "index_columns": {"key": "indexColumns", "type": "[IndexColumn]"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( self, *, - additional_properties: Optional[Dict[str, Any]] = None, - type: Union[str, "_models.ImageType"] = "docker", - reference: Optional[str] = None, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + index_columns: Optional[List["_models.IndexColumn"]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, any] - :keyword type: Type of the image. Possible values are: docker - For docker images. azureml - - For AzureML images. Known values are: "docker" and "azureml". - :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ImageType - :keyword reference: Image reference. - :paramtype reference: str + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :paramtype is_archived: bool + :keyword index_columns: Specifies index columns. + :paramtype index_columns: list[~azure.mgmt.machinelearningservices.models.IndexColumn] + :keyword stage: Specifies the asset stage. 
+ :paramtype stage: str """ - super().__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - self.reference = reference - + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) + self.index_columns = index_columns + self.provisioning_state = None + self.stage = stage -class ImageVertical(_serialization.Model): - """Abstract class for AutoML tasks that train image (computer vision) models - - such as Image Classification / Image Classification Multilabel / Image Object Detection / Image - Instance Segmentation. - All required parameters must be populated in order to send to Azure. +class FeaturestoreEntityVersionResourceArmPaginatedResult(_serialization.Model): + """A paginated list of FeaturestoreEntityVersion entities. - :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float + :ivar next_link: The link to the next page of FeaturestoreEntityVersion objects. If null, there + are no additional pages. + :vartype next_link: str + :ivar value: An array of objects of type FeaturestoreEntityVersion. + :vartype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] """ - _validation = { - "limit_settings": {"required": True}, + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturestoreEntityVersion]"}, } + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.FeaturestoreEntityVersion"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword next_link: The link to the next page of FeaturestoreEntityVersion objects. If null, + there are no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type FeaturestoreEntityVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class FeatureStoreSettings(_serialization.Model): + """FeatureStoreSettings. 
+ + :ivar compute_runtime: + :vartype compute_runtime: ~azure.mgmt.machinelearningservices.models.ComputeRuntimeDto + :ivar offline_store_connection_name: + :vartype offline_store_connection_name: str + :ivar online_store_connection_name: + :vartype online_store_connection_name: str + """ + _attribute_map = { - "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, - "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, - "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, - "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "compute_runtime": {"key": "computeRuntime", "type": "ComputeRuntimeDto"}, + "offline_store_connection_name": {"key": "offlineStoreConnectionName", "type": "str"}, + "online_store_connection_name": {"key": "onlineStoreConnectionName", "type": "str"}, } def __init__( self, *, - limit_settings: "_models.ImageLimitSettings", - sweep_settings: Optional["_models.ImageSweepSettings"] = None, - validation_data: Optional["_models.MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, + compute_runtime: Optional["_models.ComputeRuntimeDto"] = None, + offline_store_connection_name: Optional[str] = None, + online_store_connection_name: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float + :keyword compute_runtime: + :paramtype compute_runtime: ~azure.mgmt.machinelearningservices.models.ComputeRuntimeDto + :keyword offline_store_connection_name: + :paramtype offline_store_connection_name: str + :keyword online_store_connection_name: + :paramtype online_store_connection_name: str """ super().__init__(**kwargs) - self.limit_settings = limit_settings - self.sweep_settings = sweep_settings - self.validation_data = validation_data - self.validation_data_size = validation_data_size + self.compute_runtime = compute_runtime + self.offline_store_connection_name = offline_store_connection_name + self.online_store_connection_name = online_store_connection_name -class ImageClassificationBase(ImageVertical): - """ImageClassificationBase. +class FeatureSubset(MonitoringFeatureFilterBase): + """FeatureSubset. All required parameters must be populated in order to send to Azure. - :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. 
- Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :ivar filter_type: [Required] Specifies the feature filter to leverage when selecting features + to calculate metrics over. Required. Known values are: "AllFeatures", "TopNByAttribution", and + "FeatureSubset". + :vartype filter_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType + :ivar features: [Required] The list of features to include. Required. + :vartype features: list[str] """ _validation = { - "limit_settings": {"required": True}, + "filter_type": {"required": True}, + "features": {"required": True}, } _attribute_map = { - "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, - "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, - "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, - "validation_data_size": {"key": "validationDataSize", "type": "float"}, - "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, - "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "filter_type": {"key": "filterType", "type": "str"}, + "features": {"key": "features", "type": "[str]"}, + } + + def __init__(self, *, features: List[str], **kwargs: Any) -> None: + """ + :keyword features: [Required] The list of features to include. Required. + :paramtype features: list[str] + """ + super().__init__(**kwargs) + self.filter_type: str = "FeatureSubset" + self.features = features + + +class FeatureWindow(_serialization.Model): + """Specifies the feature window. + + :ivar feature_window_end: Specifies the feature window end time. + :vartype feature_window_end: ~datetime.datetime + :ivar feature_window_start: Specifies the feature window start time. + :vartype feature_window_start: ~datetime.datetime + """ + + _attribute_map = { + "feature_window_end": {"key": "featureWindowEnd", "type": "iso-8601"}, + "feature_window_start": {"key": "featureWindowStart", "type": "iso-8601"}, } def __init__( self, *, - limit_settings: "_models.ImageLimitSettings", - sweep_settings: Optional["_models.ImageSweepSettings"] = None, - validation_data: Optional["_models.MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - model_settings: Optional["_models.ImageModelSettingsClassification"] = None, - search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + feature_window_end: Optional[datetime.datetime] = None, + feature_window_start: Optional[datetime.datetime] = None, **kwargs: Any ) -> None: """ - :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. 
- :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :keyword feature_window_end: Specifies the feature window end time. + :paramtype feature_window_end: ~datetime.datetime + :keyword feature_window_start: Specifies the feature window start time. + :paramtype feature_window_start: ~datetime.datetime """ - super().__init__( - limit_settings=limit_settings, - sweep_settings=sweep_settings, - validation_data=validation_data, - validation_data_size=validation_data_size, - **kwargs - ) - self.model_settings = model_settings - self.search_space = search_space + super().__init__(**kwargs) + self.feature_window_end = feature_window_end + self.feature_window_start = feature_window_start -class ImageClassification(ImageClassificationBase, AutoMLVertical): # pylint: disable=too-many-instance-attributes - """Image Classification. Multi-class image classification is used when an image is classified with - only a single label - from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' - or a 'duck'. +class FeaturizationSettings(_serialization.Model): + """Featurization Configuration. + + :ivar dataset_language: Dataset language, useful for the text data. + :vartype dataset_language: str + """ + + _attribute_map = { + "dataset_language": {"key": "datasetLanguage", "type": "str"}, + } + + def __init__(self, *, dataset_language: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword dataset_language: Dataset language, useful for the text data. + :paramtype dataset_language: str + """ + super().__init__(**kwargs) + self.dataset_language = dataset_language + + +class FileSystemSource(DataImportSource): + """FileSystemSource. + + All required parameters must be populated in order to send to Azure. + + :ivar connection: Workspace connection for data import source storage. + :vartype connection: str + :ivar source_type: [Required] Specifies the type of data. Required. Known values are: + "database" and "file_system". + :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.DataImportSourceType + :ivar path: Path on data import FileSystem source. + :vartype path: str + """ + + _validation = { + "source_type": {"required": True}, + } + + _attribute_map = { + "connection": {"key": "connection", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + "path": {"key": "path", "type": "str"}, + } + + def __init__(self, *, connection: Optional[str] = None, path: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword connection: Workspace connection for data import source storage. + :paramtype connection: str + :keyword path: Path on data import FileSystem source. 
+ :paramtype path: str + """ + super().__init__(connection=connection, **kwargs) + self.source_type: str = "file_system" + self.path = path + + +class MonitoringInputDataBase(_serialization.Model): + """Monitoring input data base definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FixedInputData, StaticInputData, TrailingInputData + + All required parameters must be populated in order to send to Azure. + + :ivar columns: Mapping of column names to special uses. + :vartype columns: dict[str, str] + :ivar data_context: The context metadata of the data source. + :vartype data_context: str + :ivar input_data_type: [Required] Specifies the type of signal to monitor. Required. Known + values are: "Static", "Trailing", and "Fixed". + :vartype input_data_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str + """ + + _validation = { + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + } + + _subtype_map = { + "input_data_type": {"Fixed": "FixedInputData", "Static": "StaticInputData", "Trailing": "TrailingInputData"} + } + + def __init__( + self, + *, + job_input_type: Union[str, "_models.JobInputType"], + uri: str, + columns: Optional[Dict[str, str]] = None, + data_context: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword columns: Mapping of column names to special uses. + :paramtype columns: dict[str, str] + :keyword data_context: The context metadata of the data source. + :paramtype data_context: str + :keyword job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + """ + super().__init__(**kwargs) + self.columns = columns + self.data_context = data_context + self.input_data_type: Optional[str] = None + self.job_input_type = job_input_type + self.uri = uri + + +class FixedInputData(MonitoringInputDataBase): + """Fixed input data definition. + + All required parameters must be populated in order to send to Azure. + + :ivar columns: Mapping of column names to special uses. + :vartype columns: dict[str, str] + :ivar data_context: The context metadata of the data source. + :vartype data_context: str + :ivar input_data_type: [Required] Specifies the type of signal to monitor. Required. Known + values are: "Static", "Trailing", and "Fixed". + :vartype input_data_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType + :ivar job_input_type: [Required] Specifies the type of job. Required. 
Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str + """ + + _validation = { + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + } + + def __init__( + self, + *, + job_input_type: Union[str, "_models.JobInputType"], + uri: str, + columns: Optional[Dict[str, str]] = None, + data_context: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword columns: Mapping of column names to special uses. + :paramtype columns: dict[str, str] + :keyword data_context: The context metadata of the data source. + :paramtype data_context: str + :keyword job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + """ + super().__init__(columns=columns, data_context=data_context, job_input_type=job_input_type, uri=uri, **kwargs) + self.input_data_type: str = "Fixed" + + +class FlavorData(_serialization.Model): + """FlavorData. + + :ivar data: Model flavor-specific data. + :vartype data: dict[str, str] + """ + + _attribute_map = { + "data": {"key": "data", "type": "{str}"}, + } + + def __init__(self, *, data: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: + """ + :keyword data: Model flavor-specific data. + :paramtype data: dict[str, str] + """ + super().__init__(**kwargs) + self.data = data + + +class Forecasting(TableVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Forecasting task in AutoML Table vertical. All required parameters must be populated in order to send to Azure. @@ -10383,10 +13690,31 @@ class ImageClassification(ImageClassificationBase, AutoMLVertical): # pylint: d :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType :ivar training_data: [Required] Training data input. Required. :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar cv_split_column_names: Columns to use for CVSplit data. + :vartype cv_split_column_names: list[str] + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. 
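# Illustrative sketch: how the data-input models defined above (FeatureWindow,
# FileSystemSource, FixedInputData) might be constructed from client code,
# assuming these generated classes are exported from
# azure.mgmt.machinelearningservices.models as usual for this package. URIs,
# connection names and context values below are hypothetical placeholders.
import datetime

from azure.mgmt.machinelearningservices.models import (
    FeatureWindow,
    FileSystemSource,
    FixedInputData,
)

# One-week feature window; both bounds are optional datetimes (serialized as ISO-8601).
window = FeatureWindow(
    feature_window_start=datetime.datetime(2023, 8, 1),
    feature_window_end=datetime.datetime(2023, 8, 8),
)

# Fixed monitoring input: the constructor pins the discriminator
# input_data_type to "Fixed", so only job_input_type and uri are required.
baseline_input = FixedInputData(
    job_input_type="mltable",
    uri="azureml://datastores/workspaceblobstore/paths/baseline_data/",
    data_context="training",
)

# File-system data import source; the constructor pins source_type to "file_system".
import_source = FileSystemSource(connection="my-storage-connection", path="/raw/2023/08/")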
+ :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset + when validation dataset is not provided. + :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :ivar test_data: Test data input. + :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype test_data_size: float :ivar validation_data: Validation data inputs. :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar validation_data_size: The fraction of training dataset that needs to be set aside for @@ -10394,24 +13722,24 @@ class ImageClassification(ImageClassificationBase, AutoMLVertical): # pylint: d Values between (0.0 , 1.0) Applied when validation dataset is not provided. :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :ivar primary_metric: Primary metric to optimize for this task. Known values are: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and - "PrecisionScoreWeighted". + :ivar weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :vartype weight_column_name: str + :ivar forecasting_settings: Forecasting task specific inputs. + :vartype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings + :ivar primary_metric: Primary metric for forecasting task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and + "NormalizedMeanAbsoluteError". :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics + :ivar training_settings: Inputs for training phase for an AutoML Job. 
+ :vartype training_settings: + ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings """ _validation = { "task_type": {"required": True}, "training_data": {"required": True}, - "limit_settings": {"required": True}, } _attribute_map = { @@ -10419,28 +13747,44 @@ class ImageClassification(ImageClassificationBase, AutoMLVertical): # pylint: d "target_column_name": {"key": "targetColumnName", "type": "str"}, "task_type": {"key": "taskType", "type": "str"}, "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, - "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, - "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, "validation_data_size": {"key": "validationDataSize", "type": "float"}, - "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, - "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "forecasting_settings": {"key": "forecastingSettings", "type": "ForecastingSettings"}, "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", "type": "ForecastingTrainingSettings"}, } def __init__( self, *, training_data: "_models.MLTableJobInput", - limit_settings: "_models.ImageLimitSettings", log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, - sweep_settings: Optional["_models.ImageSweepSettings"] = None, + cv_split_column_names: Optional[List[str]] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, + test_data_size: Optional[float] = None, validation_data: Optional["_models.MLTableJobInput"] = None, validation_data_size: Optional[float] = None, - model_settings: Optional["_models.ImageModelSettingsClassification"] = None, - search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, - primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + weight_column_name: Optional[str] = None, + forecasting_settings: Optional["_models.ForecastingSettings"] = None, + primary_metric: Optional[Union[str, "_models.ForecastingPrimaryMetrics"]] = None, + training_settings: 
Optional["_models.ForecastingTrainingSettings"] = None, **kwargs: Any ) -> None: """ @@ -10452,150 +13796,34 @@ def __init__( :paramtype target_column_name: str :keyword training_data: [Required] Training data input. Required. :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :keyword primary_metric: Primary metric to optimize for this task. Known values are: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and - "PrecisionScoreWeighted". - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics - """ - super().__init__( - limit_settings=limit_settings, - sweep_settings=sweep_settings, - validation_data=validation_data, - validation_data_size=validation_data_size, - model_settings=model_settings, - search_space=search_space, - log_verbosity=log_verbosity, - target_column_name=target_column_name, - training_data=training_data, - **kwargs - ) - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.task_type: str = "ImageClassification" - self.training_data = training_data - self.primary_metric = primary_metric - self.limit_settings = limit_settings - self.sweep_settings = sweep_settings - self.validation_data = validation_data - self.validation_data_size = validation_data_size - self.model_settings = model_settings - self.search_space = search_space - - -class ImageClassificationMultilabel( - ImageClassificationBase, AutoMLVertical -): # pylint: disable=too-many-instance-attributes - """Image Classification Multilabel. Multi-label image classification is used when an image could - have one or more labels - from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. - - All required parameters must be populated in order to send to Azure. - - :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", - "Warning", "Error", and "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: [Required] Task type for AutoMLJob. Required. 
Known values are: - "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", and "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: [Required] Training data input. Required. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :ivar primary_metric: Primary metric to optimize for this task. Known values are: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted", and "IOU". 
- :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics - """ - - _validation = { - "task_type": {"required": True}, - "training_data": {"required": True}, - "limit_settings": {"required": True}, - } - - _attribute_map = { - "log_verbosity": {"key": "logVerbosity", "type": "str"}, - "target_column_name": {"key": "targetColumnName", "type": "str"}, - "task_type": {"key": "taskType", "type": "str"}, - "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, - "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, - "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, - "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, - "validation_data_size": {"key": "validationDataSize", "type": "float"}, - "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, - "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, - "primary_metric": {"key": "primaryMetric", "type": "str"}, - } - - def __init__( - self, - *, - training_data: "_models.MLTableJobInput", - limit_settings: "_models.ImageLimitSettings", - log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, - target_column_name: Optional[str] = None, - sweep_settings: Optional["_models.ImageSweepSettings"] = None, - validation_data: Optional["_models.MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - model_settings: Optional["_models.ImageModelSettingsClassification"] = None, - search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, - primary_metric: Optional[Union[str, "_models.ClassificationMultilabelPrimaryMetrics"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", - "Warning", "Error", and "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: [Required] Training data input. Required. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword cv_split_column_names: Columns to use for CVSplit data. + :paramtype cv_split_column_names: list[str] + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :keyword n_cross_validations: Number of cross validation folds to be applied on training + dataset + when validation dataset is not provided. 
+ :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings + :keyword test_data: Test data input. + :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype test_data_size: float :keyword validation_data: Validation data inputs. :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword validation_data_size: The fraction of training dataset that needs to be set aside for @@ -10603,26 +13831,33 @@ def __init__( Values between (0.0 , 1.0) Applied when validation dataset is not provided. :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :keyword primary_metric: Primary metric to optimize for this task. Known values are: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted", and "IOU". + :keyword weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :paramtype weight_column_name: str + :keyword forecasting_settings: Forecasting task specific inputs. + :paramtype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings + :keyword primary_metric: Primary metric for forecasting task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and + "NormalizedMeanAbsoluteError". :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics + ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics + :keyword training_settings: Inputs for training phase for an AutoML Job. 
+ :paramtype training_settings: + ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings """ super().__init__( + cv_split_column_names=cv_split_column_names, + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, limit_settings=limit_settings, + n_cross_validations=n_cross_validations, + search_space=search_space, sweep_settings=sweep_settings, + test_data=test_data, + test_data_size=test_data_size, validation_data=validation_data, validation_data_size=validation_data_size, - model_settings=model_settings, - search_space=search_space, + weight_column_name=weight_column_name, log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, @@ -10630,1622 +13865,3603 @@ def __init__( ) self.log_verbosity = log_verbosity self.target_column_name = target_column_name - self.task_type: str = "ImageClassificationMultilabel" + self.task_type: str = "Forecasting" self.training_data = training_data + self.forecasting_settings = forecasting_settings self.primary_metric = primary_metric + self.training_settings = training_settings + self.cv_split_column_names = cv_split_column_names + self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings + self.n_cross_validations = n_cross_validations + self.search_space = search_space self.sweep_settings = sweep_settings + self.test_data = test_data + self.test_data_size = test_data_size self.validation_data = validation_data self.validation_data_size = validation_data_size - self.model_settings = model_settings - self.search_space = search_space + self.weight_column_name = weight_column_name -class ImageObjectDetectionBase(ImageVertical): - """ImageObjectDetectionBase. +class ForecastingSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Forecasting specific parameters. - All required parameters must be populated in order to send to Azure. - - :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - """ - - _validation = { - "limit_settings": {"required": True}, - } + :ivar country_or_region_for_holidays: Country or region for holidays for forecasting tasks. + These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. + :vartype country_or_region_for_holidays: str + :ivar cv_step_size: Number of periods between the origin time of one CV fold and the next fold. 
+ For + example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be + three days apart. + :vartype cv_step_size: int + :ivar feature_lags: Flag for generating lags for the numeric features with 'auto' or null. + Known values are: "None" and "Auto". + :vartype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags + :ivar features_unknown_at_forecast_time: The feature columns that are available for training + but unknown at the time of forecast/inference. + If features_unknown_at_forecast_time is not set, it is assumed that all the feature columns in + the dataset are known at inference time. + :vartype features_unknown_at_forecast_time: list[str] + :ivar forecast_horizon: The desired maximum forecast horizon in units of time-series frequency. + :vartype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon + :ivar frequency: When forecasting, this parameter represents the period with which the forecast + is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset frequency + by default. + :vartype frequency: str + :ivar seasonality: Set time series seasonality as an integer multiple of the series frequency. + If seasonality is set to 'auto', it will be inferred. + :vartype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality + :ivar short_series_handling_config: The parameter defining how if AutoML should handle short + time series. Known values are: "None", "Auto", "Pad", and "Drop". + :vartype short_series_handling_config: str or + ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration + :ivar target_aggregate_function: The function to be used to aggregate the time series target + column to conform to a user specified frequency. + If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the + error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean". + Known values are: "None", "Sum", "Max", "Min", and "Mean". + :vartype target_aggregate_function: str or + ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction + :ivar target_lags: The number of past periods to lag from the target column. + :vartype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags + :ivar target_rolling_window_size: The number of past periods used to create a rolling window + average of the target column. + :vartype target_rolling_window_size: + ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize + :ivar time_column_name: The name of the time column. This parameter is required when + forecasting to specify the datetime column in the input data used for building the time series + and inferring its frequency. + :vartype time_column_name: str + :ivar time_series_id_column_names: The names of columns used to group a timeseries. It can be + used to create multiple series. + If grain is not defined, the data set is assumed to be one time-series. This parameter is used + with task type forecasting. + :vartype time_series_id_column_names: list[str] + :ivar use_stl: Configure STL Decomposition of the time-series target column. Known values are: + "None", "Season", and "SeasonTrend". 
+ :vartype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl + """ _attribute_map = { - "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, - "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, - "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, - "validation_data_size": {"key": "validationDataSize", "type": "float"}, - "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, - "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + "country_or_region_for_holidays": {"key": "countryOrRegionForHolidays", "type": "str"}, + "cv_step_size": {"key": "cvStepSize", "type": "int"}, + "feature_lags": {"key": "featureLags", "type": "str"}, + "features_unknown_at_forecast_time": {"key": "featuresUnknownAtForecastTime", "type": "[str]"}, + "forecast_horizon": {"key": "forecastHorizon", "type": "ForecastHorizon"}, + "frequency": {"key": "frequency", "type": "str"}, + "seasonality": {"key": "seasonality", "type": "Seasonality"}, + "short_series_handling_config": {"key": "shortSeriesHandlingConfig", "type": "str"}, + "target_aggregate_function": {"key": "targetAggregateFunction", "type": "str"}, + "target_lags": {"key": "targetLags", "type": "TargetLags"}, + "target_rolling_window_size": {"key": "targetRollingWindowSize", "type": "TargetRollingWindowSize"}, + "time_column_name": {"key": "timeColumnName", "type": "str"}, + "time_series_id_column_names": {"key": "timeSeriesIdColumnNames", "type": "[str]"}, + "use_stl": {"key": "useStl", "type": "str"}, } def __init__( self, *, - limit_settings: "_models.ImageLimitSettings", - sweep_settings: Optional["_models.ImageSweepSettings"] = None, - validation_data: Optional["_models.MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, - search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + country_or_region_for_holidays: Optional[str] = None, + cv_step_size: Optional[int] = None, + feature_lags: Optional[Union[str, "_models.FeatureLags"]] = None, + features_unknown_at_forecast_time: Optional[List[str]] = None, + forecast_horizon: Optional["_models.ForecastHorizon"] = None, + frequency: Optional[str] = None, + seasonality: Optional["_models.Seasonality"] = None, + short_series_handling_config: Optional[Union[str, "_models.ShortSeriesHandlingConfiguration"]] = None, + target_aggregate_function: Optional[Union[str, "_models.TargetAggregationFunction"]] = None, + target_lags: Optional["_models.TargetLags"] = None, + target_rolling_window_size: Optional["_models.TargetRollingWindowSize"] = None, + time_column_name: Optional[str] = None, + time_series_id_column_names: Optional[List[str]] = None, + use_stl: Optional[Union[str, "_models.UseStl"]] = None, **kwargs: Any ) -> None: """ - :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. 
- :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :keyword country_or_region_for_holidays: Country or region for holidays for forecasting tasks. + These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. + :paramtype country_or_region_for_holidays: str + :keyword cv_step_size: Number of periods between the origin time of one CV fold and the next + fold. For + example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be + three days apart. + :paramtype cv_step_size: int + :keyword feature_lags: Flag for generating lags for the numeric features with 'auto' or null. + Known values are: "None" and "Auto". + :paramtype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags + :keyword features_unknown_at_forecast_time: The feature columns that are available for training + but unknown at the time of forecast/inference. + If features_unknown_at_forecast_time is not set, it is assumed that all the feature columns in + the dataset are known at inference time. + :paramtype features_unknown_at_forecast_time: list[str] + :keyword forecast_horizon: The desired maximum forecast horizon in units of time-series + frequency. + :paramtype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon + :keyword frequency: When forecasting, this parameter represents the period with which the + forecast is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset + frequency by default. + :paramtype frequency: str + :keyword seasonality: Set time series seasonality as an integer multiple of the series + frequency. + If seasonality is set to 'auto', it will be inferred. + :paramtype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality + :keyword short_series_handling_config: The parameter defining how if AutoML should handle short + time series. Known values are: "None", "Auto", "Pad", and "Drop". + :paramtype short_series_handling_config: str or + ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration + :keyword target_aggregate_function: The function to be used to aggregate the time series target + column to conform to a user specified frequency. + If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the + error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean". + Known values are: "None", "Sum", "Max", "Min", and "Mean". + :paramtype target_aggregate_function: str or + ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction + :keyword target_lags: The number of past periods to lag from the target column. 
+ :paramtype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags + :keyword target_rolling_window_size: The number of past periods used to create a rolling window + average of the target column. + :paramtype target_rolling_window_size: + ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize + :keyword time_column_name: The name of the time column. This parameter is required when + forecasting to specify the datetime column in the input data used for building the time series + and inferring its frequency. + :paramtype time_column_name: str + :keyword time_series_id_column_names: The names of columns used to group a timeseries. It can + be used to create multiple series. + If grain is not defined, the data set is assumed to be one time-series. This parameter is used + with task type forecasting. + :paramtype time_series_id_column_names: list[str] + :keyword use_stl: Configure STL Decomposition of the time-series target column. Known values + are: "None", "Season", and "SeasonTrend". + :paramtype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl """ - super().__init__( - limit_settings=limit_settings, - sweep_settings=sweep_settings, - validation_data=validation_data, - validation_data_size=validation_data_size, - **kwargs - ) - self.model_settings = model_settings - self.search_space = search_space - + super().__init__(**kwargs) + self.country_or_region_for_holidays = country_or_region_for_holidays + self.cv_step_size = cv_step_size + self.feature_lags = feature_lags + self.features_unknown_at_forecast_time = features_unknown_at_forecast_time + self.forecast_horizon = forecast_horizon + self.frequency = frequency + self.seasonality = seasonality + self.short_series_handling_config = short_series_handling_config + self.target_aggregate_function = target_aggregate_function + self.target_lags = target_lags + self.target_rolling_window_size = target_rolling_window_size + self.time_column_name = time_column_name + self.time_series_id_column_names = time_series_id_column_names + self.use_stl = use_stl -class ImageInstanceSegmentation( - ImageObjectDetectionBase, AutoMLVertical -): # pylint: disable=too-many-instance-attributes - """Image Instance Segmentation. Instance segmentation is used to identify objects in an image at - the pixel level, - drawing a polygon around each object in the image. - All required parameters must be populated in order to send to Azure. +class ForecastingTrainingSettings(TrainingSettings): + """Forecasting Training related configuration. - :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", - "Warning", "Error", and "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: - "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", and "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: [Required] Training data input. Required. 
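# Illustrative sketch: a ForecastingSettings payload built only from the plain
# string/int fields documented above. Enum-typed fields such as use_stl,
# short_series_handling_config and target_aggregate_function accept either the
# string literal or the corresponding enum member; every value below is a
# hypothetical example.
from azure.mgmt.machinelearningservices.models import ForecastingSettings

forecasting_settings = ForecastingSettings(
    time_column_name="timestamp",
    time_series_id_column_names=["store_id", "sku"],
    country_or_region_for_holidays="US",
    cv_step_size=3,
    short_series_handling_config="Auto",
    target_aggregate_function="Mean",
    use_stl="Season",
)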
- :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :ivar primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics + :ivar enable_dnn_training: Enable recommendation of DNN models. + :vartype enable_dnn_training: bool + :ivar enable_model_explainability: Flag to turn on explainability on best model. + :vartype enable_model_explainability: bool + :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :vartype enable_onnx_compatible_models: bool + :ivar enable_stack_ensemble: Enable stack ensemble run. + :vartype enable_stack_ensemble: bool + :ivar enable_vote_ensemble: Enable voting ensemble run. + :vartype enable_vote_ensemble: bool + :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :vartype ensemble_model_download_timeout: ~datetime.timedelta + :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :vartype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to + 'non-distributed' for now, however in the future may result in mixed mode or heuristics based + mode selection. Default is 'auto'. + If 'Distributed' then only distributed featurization is used and distributed algorithms are + chosen. + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". + :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode + :ivar allowed_training_algorithms: Allowed models for forecasting task. + :vartype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + :ivar blocked_training_algorithms: Blocked models for forecasting task. 
+ :vartype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] """ - _validation = { - "task_type": {"required": True}, - "training_data": {"required": True}, - "limit_settings": {"required": True}, - } - _attribute_map = { - "log_verbosity": {"key": "logVerbosity", "type": "str"}, - "target_column_name": {"key": "targetColumnName", "type": "str"}, - "task_type": {"key": "taskType", "type": "str"}, - "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, - "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, - "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, - "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, - "validation_data_size": {"key": "validationDataSize", "type": "float"}, - "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, - "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, - "primary_metric": {"key": "primaryMetric", "type": "str"}, + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, + "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, } def __init__( self, *, - training_data: "_models.MLTableJobInput", - limit_settings: "_models.ImageLimitSettings", - log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, - target_column_name: Optional[str] = None, - sweep_settings: Optional["_models.ImageSweepSettings"] = None, - validation_data: Optional["_models.MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, - search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, - primary_metric: Optional[Union[str, "_models.InstanceSegmentationPrimaryMetrics"]] = None, + enable_dnn_training: bool = False, + enable_model_explainability: bool = True, + enable_onnx_compatible_models: bool = False, + enable_stack_ensemble: bool = True, + enable_vote_ensemble: bool = True, + ensemble_model_download_timeout: datetime.timedelta = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + training_mode: Optional[Union[str, "_models.TrainingMode"]] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, **kwargs: Any ) -> None: """ - :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", - "Warning", "Error", and "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. 
- Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: [Required] Training data input. Required. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :keyword primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics + :keyword enable_dnn_training: Enable recommendation of DNN models. + :paramtype enable_dnn_training: bool + :keyword enable_model_explainability: Flag to turn on explainability on best model. + :paramtype enable_model_explainability: bool + :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :paramtype enable_onnx_compatible_models: bool + :keyword enable_stack_ensemble: Enable stack ensemble run. + :paramtype enable_stack_ensemble: bool + :keyword enable_vote_ensemble: Enable voting ensemble run. + :paramtype enable_vote_ensemble: bool + :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :paramtype ensemble_model_download_timeout: ~datetime.timedelta + :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :paramtype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to + 'non-distributed' for now, however in the future may result in mixed mode or heuristics based + mode selection. Default is 'auto'. + If 'Distributed' then only distributed featurization is used and distributed algorithms are + chosen. + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". + :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode + :keyword allowed_training_algorithms: Allowed models for forecasting task. + :paramtype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + :keyword blocked_training_algorithms: Blocked models for forecasting task. 
+ :paramtype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] """ super().__init__( - limit_settings=limit_settings, - sweep_settings=sweep_settings, - validation_data=validation_data, - validation_data_size=validation_data_size, - model_settings=model_settings, - search_space=search_space, - log_verbosity=log_verbosity, - target_column_name=target_column_name, - training_data=training_data, + enable_dnn_training=enable_dnn_training, + enable_model_explainability=enable_model_explainability, + enable_onnx_compatible_models=enable_onnx_compatible_models, + enable_stack_ensemble=enable_stack_ensemble, + enable_vote_ensemble=enable_vote_ensemble, + ensemble_model_download_timeout=ensemble_model_download_timeout, + stack_ensemble_settings=stack_ensemble_settings, + training_mode=training_mode, **kwargs ) - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.task_type: str = "ImageInstanceSegmentation" - self.training_data = training_data - self.primary_metric = primary_metric - self.limit_settings = limit_settings - self.sweep_settings = sweep_settings - self.validation_data = validation_data - self.validation_data_size = validation_data_size - self.model_settings = model_settings - self.search_space = search_space + self.allowed_training_algorithms = allowed_training_algorithms + self.blocked_training_algorithms = blocked_training_algorithms -class ImageLimitSettings(_serialization.Model): - """Limit settings for the AutoML job. +class FQDNEndpoint(_serialization.Model): + """FQDNEndpoint. - :ivar max_concurrent_trials: Maximum number of concurrent AutoML iterations. - :vartype max_concurrent_trials: int - :ivar max_trials: Maximum number of AutoML iterations. - :vartype max_trials: int - :ivar timeout: AutoML job timeout. - :vartype timeout: ~datetime.timedelta + :ivar domain_name: + :vartype domain_name: str + :ivar endpoint_details: + :vartype endpoint_details: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] """ _attribute_map = { - "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, - "max_trials": {"key": "maxTrials", "type": "int"}, - "timeout": {"key": "timeout", "type": "duration"}, + "domain_name": {"key": "domainName", "type": "str"}, + "endpoint_details": {"key": "endpointDetails", "type": "[FQDNEndpointDetail]"}, } def __init__( - self, *, max_concurrent_trials: int = 1, max_trials: int = 1, timeout: datetime.timedelta = "P7D", **kwargs: Any + self, + *, + domain_name: Optional[str] = None, + endpoint_details: Optional[List["_models.FQDNEndpointDetail"]] = None, + **kwargs: Any ) -> None: """ - :keyword max_concurrent_trials: Maximum number of concurrent AutoML iterations. - :paramtype max_concurrent_trials: int - :keyword max_trials: Maximum number of AutoML iterations. - :paramtype max_trials: int - :keyword timeout: AutoML job timeout. - :paramtype timeout: ~datetime.timedelta + :keyword domain_name: + :paramtype domain_name: str + :keyword endpoint_details: + :paramtype endpoint_details: + list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] """ super().__init__(**kwargs) - self.max_concurrent_trials = max_concurrent_trials - self.max_trials = max_trials - self.timeout = timeout + self.domain_name = domain_name + self.endpoint_details = endpoint_details -class ImageMetadata(_serialization.Model): - """Returns metadata about the operating system image for this compute instance. 
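# --- Illustrative sketch (editor's example, not part of the generated diff) ---
# The hunk above replaces the ImageInstanceSegmentation fields with forecasting
# training settings: ensemble flags, an ensemble download timeout, training_mode,
# and allowed/blocked ForecastingModels lists. Assuming the enclosing class is the
# ForecastingTrainingSettings model (its "class" line sits outside this hunk), a
# minimal construction might look like this:
import datetime

from azure.mgmt.machinelearningservices import models

training_settings = models.ForecastingTrainingSettings(  # assumed class name
    enable_stack_ensemble=False,          # overrides the generated default of True
    enable_vote_ensemble=True,
    # The generated default is the ISO-8601 string "PT5M"; the serializer also
    # accepts a real timedelta and writes it back out as an ISO-8601 duration.
    ensemble_model_download_timeout=datetime.timedelta(minutes=10),
    training_mode="Auto",                     # or the TrainingMode enum member
    blocked_training_algorithms=["Prophet"],  # strings or ForecastingModels values
)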
+class FQDNEndpointDetail(_serialization.Model): + """FQDNEndpointDetail. - :ivar current_image_version: Specifies the current operating system image version this compute - instance is running on. - :vartype current_image_version: str - :ivar latest_image_version: Specifies the latest available operating system image version. - :vartype latest_image_version: str - :ivar is_latest_os_image_version: Specifies whether this compute instance is running on the - latest operating system image. - :vartype is_latest_os_image_version: bool + :ivar port: + :vartype port: int """ _attribute_map = { - "current_image_version": {"key": "currentImageVersion", "type": "str"}, - "latest_image_version": {"key": "latestImageVersion", "type": "str"}, - "is_latest_os_image_version": {"key": "isLatestOsImageVersion", "type": "bool"}, + "port": {"key": "port", "type": "int"}, + } + + def __init__(self, *, port: Optional[int] = None, **kwargs: Any) -> None: + """ + :keyword port: + :paramtype port: int + """ + super().__init__(**kwargs) + self.port = port + + +class FQDNEndpoints(_serialization.Model): + """FQDNEndpoints. + + :ivar category: + :vartype category: str + :ivar endpoints: + :vartype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] + """ + + _attribute_map = { + "category": {"key": "category", "type": "str"}, + "endpoints": {"key": "endpoints", "type": "[FQDNEndpoint]"}, + } + + def __init__( + self, *, category: Optional[str] = None, endpoints: Optional[List["_models.FQDNEndpoint"]] = None, **kwargs: Any + ) -> None: + """ + :keyword category: + :paramtype category: str + :keyword endpoints: + :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] + """ + super().__init__(**kwargs) + self.category = category + self.endpoints = endpoints + + +class FQDNEndpointsPropertyBag(_serialization.Model): + """Property bag for FQDN endpoints result. + + :ivar properties: + :vartype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpoints + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "FQDNEndpoints"}, + } + + def __init__(self, *, properties: Optional["_models.FQDNEndpoints"] = None, **kwargs: Any) -> None: + """ + :keyword properties: + :paramtype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpoints + """ + super().__init__(**kwargs) + self.properties = properties + + +class OutboundRule(_serialization.Model): + """Outbound Rule for the managed network of a machine learning workspace. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FqdnOutboundRule, PrivateEndpointOutboundRule, ServiceTagOutboundRule + + All required parameters must be populated in order to send to Azure. + + :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory + :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Known + values are: "Inactive" and "Active". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus + :ivar type: Type of a managed network Outbound Rule of a machine learning workspace. Required. + Known values are: "FQDN", "PrivateEndpoint", and "ServiceTag". 
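# --- Illustrative sketch (editor's example, not part of the generated diff) ---
# The FQDN* models added above describe a workspace's outbound FQDN dependencies.
# They are normally returned by the service, but building them locally shows how
# the pieces nest (FQDNEndpointsPropertyBag -> FQDNEndpoints -> FQDNEndpoint ->
# FQDNEndpointDetail); all field values below are hypothetical.
from azure.mgmt.machinelearningservices import models

bag = models.FQDNEndpointsPropertyBag(
    properties=models.FQDNEndpoints(
        category="Azure Machine Learning",
        endpoints=[
            models.FQDNEndpoint(
                domain_name="ml.azure.com",
                endpoint_details=[models.FQDNEndpointDetail(port=443)],
            )
        ],
    )
)
# serialize() emits the camelCase wire shape declared in the _attribute_map above.
print(bag.serialize())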
+ :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + } + + _subtype_map = { + "type": { + "FQDN": "FqdnOutboundRule", + "PrivateEndpoint": "PrivateEndpointOutboundRule", + "ServiceTag": "ServiceTagOutboundRule", + } } def __init__( self, *, - current_image_version: Optional[str] = None, - latest_image_version: Optional[str] = None, - is_latest_os_image_version: Optional[bool] = None, + category: Optional[Union[str, "_models.RuleCategory"]] = None, + status: Optional[Union[str, "_models.RuleStatus"]] = None, **kwargs: Any ) -> None: """ - :keyword current_image_version: Specifies the current operating system image version this - compute instance is running on. - :paramtype current_image_version: str - :keyword latest_image_version: Specifies the latest available operating system image version. - :paramtype latest_image_version: str - :keyword is_latest_os_image_version: Specifies whether this compute instance is running on the - latest operating system image. - :paramtype is_latest_os_image_version: bool + :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory + :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. Known + values are: "Inactive" and "Active". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus """ super().__init__(**kwargs) - self.current_image_version = current_image_version - self.latest_image_version = latest_image_version - self.is_latest_os_image_version = is_latest_os_image_version + self.category = category + self.status = status + self.type: Optional[str] = None -class ImageModelDistributionSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes - """Distribution expressions to sweep over values of model settings. +class FqdnOutboundRule(OutboundRule): + """FQDN Outbound Rule for the managed network of a machine learning workspace. - :code:` - Some examples are: - ``` - ModelName = "choice('seresnext', 'resnest50')"; - LearningRate = "uniform(0.001, 0.01)"; - LayersToFreeze = "choice(0, 2)"; - ```` - All distributions can be specified as distribution_name(min, max) or choice(val1, val2, ..., - valn) - where distribution name can be: uniform, quniform, loguniform, etc - For more details on how to compose distribution expressions please check the documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters - For more information on the available settings please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + All required parameters must be populated in order to send to Azure. - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: str - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: str - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. 
- :vartype beta2: str - :ivar distributed: Whether to use distributer training. - :vartype distributed: str - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: str - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: str - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: str - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: str - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: str - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: str - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: str - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: str - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :vartype learning_rate_scheduler: str - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: str - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: str - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: str - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: str - :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :vartype optimizer: str - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: str - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: str - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: str - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: str - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: str - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. 
- :vartype warmup_cosine_lr_cycles: str - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: str - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: str + :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory + :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Known + values are: "Inactive" and "Active". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus + :ivar type: Type of a managed network Outbound Rule of a machine learning workspace. Required. + Known values are: "FQDN", "PrivateEndpoint", and "ServiceTag". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType + :ivar destination: + :vartype destination: str """ + _validation = { + "type": {"required": True}, + } + _attribute_map = { - "ams_gradient": {"key": "amsGradient", "type": "str"}, - "augmentations": {"key": "augmentations", "type": "str"}, - "beta1": {"key": "beta1", "type": "str"}, - "beta2": {"key": "beta2", "type": "str"}, - "distributed": {"key": "distributed", "type": "str"}, - "early_stopping": {"key": "earlyStopping", "type": "str"}, - "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, - "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, - "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, - "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, - "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, - "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, - "learning_rate": {"key": "learningRate", "type": "str"}, - "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, - "model_name": {"key": "modelName", "type": "str"}, - "momentum": {"key": "momentum", "type": "str"}, - "nesterov": {"key": "nesterov", "type": "str"}, - "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, - "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, - "optimizer": {"key": "optimizer", "type": "str"}, - "random_seed": {"key": "randomSeed", "type": "str"}, - "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, - "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, - "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, - "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, - "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, - "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, - "weight_decay": {"key": "weightDecay", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "destination": {"key": "destination", "type": "str"}, } - def __init__( # pylint: disable=too-many-locals + def __init__( self, *, - ams_gradient: Optional[str] = None, - augmentations: Optional[str] = None, - beta1: Optional[str] = None, - beta2: Optional[str] = None, - distributed: Optional[str] = None, - early_stopping: Optional[str] = None, - early_stopping_delay: Optional[str] = None, - 
early_stopping_patience: Optional[str] = None, - enable_onnx_normalization: Optional[str] = None, - evaluation_frequency: Optional[str] = None, - gradient_accumulation_step: Optional[str] = None, - layers_to_freeze: Optional[str] = None, - learning_rate: Optional[str] = None, - learning_rate_scheduler: Optional[str] = None, - model_name: Optional[str] = None, - momentum: Optional[str] = None, - nesterov: Optional[str] = None, - number_of_epochs: Optional[str] = None, - number_of_workers: Optional[str] = None, - optimizer: Optional[str] = None, - random_seed: Optional[str] = None, - step_lr_gamma: Optional[str] = None, - step_lr_step_size: Optional[str] = None, - training_batch_size: Optional[str] = None, - validation_batch_size: Optional[str] = None, - warmup_cosine_lr_cycles: Optional[str] = None, - warmup_cosine_lr_warmup_epochs: Optional[str] = None, - weight_decay: Optional[str] = None, + category: Optional[Union[str, "_models.RuleCategory"]] = None, + status: Optional[Union[str, "_models.RuleStatus"]] = None, + destination: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: str - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: str - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: str - :keyword distributed: Whether to use distributer training. - :paramtype distributed: str - :keyword early_stopping: Enable early stopping logic during training. - :paramtype early_stopping: str - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: str - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: str - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: str - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: str - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: str - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: str - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: str - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. 
- :paramtype learning_rate_scheduler: str - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: str - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: str - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: str - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: str - :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :paramtype optimizer: str - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: str - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: str - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: str - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: str - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: str - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: str - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: str - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: str + :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory + :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. Known + values are: "Inactive" and "Active". 
+ :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus + :keyword destination: + :paramtype destination: str """ - super().__init__(**kwargs) - self.ams_gradient = ams_gradient - self.augmentations = augmentations - self.beta1 = beta1 - self.beta2 = beta2 - self.distributed = distributed - self.early_stopping = early_stopping - self.early_stopping_delay = early_stopping_delay - self.early_stopping_patience = early_stopping_patience - self.enable_onnx_normalization = enable_onnx_normalization - self.evaluation_frequency = evaluation_frequency - self.gradient_accumulation_step = gradient_accumulation_step - self.layers_to_freeze = layers_to_freeze - self.learning_rate = learning_rate - self.learning_rate_scheduler = learning_rate_scheduler - self.model_name = model_name - self.momentum = momentum - self.nesterov = nesterov - self.number_of_epochs = number_of_epochs - self.number_of_workers = number_of_workers - self.optimizer = optimizer - self.random_seed = random_seed - self.step_lr_gamma = step_lr_gamma - self.step_lr_step_size = step_lr_step_size - self.training_batch_size = training_batch_size - self.validation_batch_size = validation_batch_size - self.warmup_cosine_lr_cycles = warmup_cosine_lr_cycles - self.warmup_cosine_lr_warmup_epochs = warmup_cosine_lr_warmup_epochs - self.weight_decay = weight_decay + super().__init__(category=category, status=status, **kwargs) + self.type: str = "FQDN" + self.destination = destination -class ImageModelDistributionSettingsClassification( - ImageModelDistributionSettings -): # pylint: disable=too-many-instance-attributes - """Distribution expressions to sweep over values of model settings. +class GenerationSafetyQualityMetricThreshold(_serialization.Model): + """Generation safety quality metric threshold definition. - :code:` - Some examples are: - ``` - ModelName = "choice('seresnext', 'resnest50')"; - LearningRate = "uniform(0.001, 0.01)"; - LayersToFreeze = "choice(0, 2)"; - ```` - For more details on how to compose distribution expressions please check the documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters - For more information on the available settings please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + All required parameters must be populated in order to send to Azure. - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: str - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: str - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: str - :ivar distributed: Whether to use distributer training. - :vartype distributed: str - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: str - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: str - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. 
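# --- Illustrative sketch (editor's example, not part of the generated diff) ---
# OutboundRule is a polymorphic base: the _subtype_map above routes the "type"
# discriminator to FqdnOutboundRule, PrivateEndpointOutboundRule, or
# ServiceTagOutboundRule. Constructing the FQDN variant pins type to "FQDN", and
# deserializing through the base class resolves the subtype from the payload.
from azure.mgmt.machinelearningservices import models

rule = models.FqdnOutboundRule(
    category="UserDefined",
    status="Active",
    destination="pypi.org",   # hypothetical FQDN destination
)
assert rule.type == "FQDN"

# Round-trip: the "type": "FQDN" discriminator selects FqdnOutboundRule again.
wire = rule.serialize()
restored = models.OutboundRule.deserialize(wire)
assert isinstance(restored, models.FqdnOutboundRule)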
- :vartype early_stopping_patience: str - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: str - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: str - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: str - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: str - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: str - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :vartype learning_rate_scheduler: str - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: str - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: str - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: str - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: str - :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :vartype optimizer: str - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: str - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: str - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: str - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: str - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: str - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: str - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: str - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: str - :ivar training_crop_size: Image crop size that is input to the neural network for the training - dataset. Must be a positive integer. 
- :vartype training_crop_size: str - :ivar validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :vartype validation_crop_size: str - :ivar validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :vartype validation_resize_size: str - :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. - :vartype weighted_loss: str + :ivar metric: [Required] Gets or sets the feature attribution metric to calculate. Required. + Known values are: "AcceptableGroundednessScorePerInstance", "AggregatedGroundednessPassRate", + "AcceptableCoherenceScorePerInstance", "AggregatedCoherencePassRate", + "AcceptableFluencyScorePerInstance", "AggregatedFluencyPassRate", + "AcceptableSimilarityScorePerInstance", "AggregatedSimilarityPassRate", + "AcceptableRelevanceScorePerInstance", and "AggregatedRelevancePassRate". + :vartype metric: str or + ~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetric + :ivar threshold: Gets or sets the threshold value. + If null, a default value will be set depending on the selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ + _validation = { + "metric": {"required": True}, + } + _attribute_map = { - "ams_gradient": {"key": "amsGradient", "type": "str"}, - "augmentations": {"key": "augmentations", "type": "str"}, - "beta1": {"key": "beta1", "type": "str"}, - "beta2": {"key": "beta2", "type": "str"}, - "distributed": {"key": "distributed", "type": "str"}, - "early_stopping": {"key": "earlyStopping", "type": "str"}, - "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, - "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, - "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, - "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, - "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, - "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, - "learning_rate": {"key": "learningRate", "type": "str"}, - "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, - "model_name": {"key": "modelName", "type": "str"}, - "momentum": {"key": "momentum", "type": "str"}, - "nesterov": {"key": "nesterov", "type": "str"}, - "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, - "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, - "optimizer": {"key": "optimizer", "type": "str"}, - "random_seed": {"key": "randomSeed", "type": "str"}, - "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, - "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, - "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, - "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, - "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, - "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, - "weight_decay": {"key": "weightDecay", "type": "str"}, - "training_crop_size": {"key": "trainingCropSize", "type": "str"}, - "validation_crop_size": {"key": "validationCropSize", "type": "str"}, - "validation_resize_size": {"key": "validationResizeSize", "type": "str"}, - "weighted_loss": {"key": "weightedLoss", "type": "str"}, + 
"metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } - def __init__( # pylint: disable=too-many-locals + def __init__( self, *, - ams_gradient: Optional[str] = None, - augmentations: Optional[str] = None, - beta1: Optional[str] = None, - beta2: Optional[str] = None, - distributed: Optional[str] = None, - early_stopping: Optional[str] = None, - early_stopping_delay: Optional[str] = None, - early_stopping_patience: Optional[str] = None, - enable_onnx_normalization: Optional[str] = None, - evaluation_frequency: Optional[str] = None, - gradient_accumulation_step: Optional[str] = None, - layers_to_freeze: Optional[str] = None, - learning_rate: Optional[str] = None, - learning_rate_scheduler: Optional[str] = None, - model_name: Optional[str] = None, - momentum: Optional[str] = None, - nesterov: Optional[str] = None, - number_of_epochs: Optional[str] = None, - number_of_workers: Optional[str] = None, - optimizer: Optional[str] = None, - random_seed: Optional[str] = None, - step_lr_gamma: Optional[str] = None, - step_lr_step_size: Optional[str] = None, - training_batch_size: Optional[str] = None, - validation_batch_size: Optional[str] = None, - warmup_cosine_lr_cycles: Optional[str] = None, - warmup_cosine_lr_warmup_epochs: Optional[str] = None, - weight_decay: Optional[str] = None, - training_crop_size: Optional[str] = None, - validation_crop_size: Optional[str] = None, - validation_resize_size: Optional[str] = None, - weighted_loss: Optional[str] = None, + metric: Union[str, "_models.GenerationSafetyQualityMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any ) -> None: """ - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: str - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: str - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: str - :keyword distributed: Whether to use distributer training. - :paramtype distributed: str - :keyword early_stopping: Enable early stopping logic during training. - :paramtype early_stopping: str - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: str - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: str - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: str - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: str - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: str - :keyword layers_to_freeze: Number of layers to freeze for the model. 
Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: str - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: str - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :paramtype learning_rate_scheduler: str - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: str - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: str - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: str - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: str - :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :paramtype optimizer: str - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: str - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: str - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: str - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: str - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: str - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: str - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: str - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: str - :keyword training_crop_size: Image crop size that is input to the neural network for the - training dataset. Must be a positive integer. - :paramtype training_crop_size: str - :keyword validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :paramtype validation_crop_size: str - :keyword validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :paramtype validation_resize_size: str - :keyword weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. - :paramtype weighted_loss: str + :keyword metric: [Required] Gets or sets the feature attribution metric to calculate. Required. 
+ Known values are: "AcceptableGroundednessScorePerInstance", "AggregatedGroundednessPassRate", + "AcceptableCoherenceScorePerInstance", "AggregatedCoherencePassRate", + "AcceptableFluencyScorePerInstance", "AggregatedFluencyPassRate", + "AcceptableSimilarityScorePerInstance", "AggregatedSimilarityPassRate", + "AcceptableRelevanceScorePerInstance", and "AggregatedRelevancePassRate". + :paramtype metric: str or + ~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetric + :keyword threshold: Gets or sets the threshold value. + If null, a default value will be set depending on the selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super().__init__( - ams_gradient=ams_gradient, - augmentations=augmentations, - beta1=beta1, - beta2=beta2, - distributed=distributed, - early_stopping=early_stopping, - early_stopping_delay=early_stopping_delay, - early_stopping_patience=early_stopping_patience, - enable_onnx_normalization=enable_onnx_normalization, - evaluation_frequency=evaluation_frequency, - gradient_accumulation_step=gradient_accumulation_step, - layers_to_freeze=layers_to_freeze, - learning_rate=learning_rate, - learning_rate_scheduler=learning_rate_scheduler, - model_name=model_name, - momentum=momentum, - nesterov=nesterov, - number_of_epochs=number_of_epochs, - number_of_workers=number_of_workers, - optimizer=optimizer, - random_seed=random_seed, - step_lr_gamma=step_lr_gamma, - step_lr_step_size=step_lr_step_size, - training_batch_size=training_batch_size, - validation_batch_size=validation_batch_size, - warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, - warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, - weight_decay=weight_decay, - **kwargs - ) - self.training_crop_size = training_crop_size - self.validation_crop_size = validation_crop_size - self.validation_resize_size = validation_resize_size - self.weighted_loss = weighted_loss + super().__init__(**kwargs) + self.metric = metric + self.threshold = threshold -class ImageModelDistributionSettingsObjectDetection( - ImageModelDistributionSettings -): # pylint: disable=too-many-instance-attributes - """Distribution expressions to sweep over values of model settings. +class GenerationSafetyQualityMonitoringSignal(MonitoringSignalBase): + """Generation safety quality monitoring signal definition. - :code:` - Some examples are: - ``` - ModelName = "choice('seresnext', 'resnest50')"; - LearningRate = "uniform(0.001, 0.01)"; - LayersToFreeze = "choice(0, 2)"; - ```` - For more details on how to compose distribution expressions please check the documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters - For more information on the available settings please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + All required parameters must be populated in order to send to Azure. - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: str - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: str - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: str - :ivar distributed: Whether to use distributer training. 
- :vartype distributed: str - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: str - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: str - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: str - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: str - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: str - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: str - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: str - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: str - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :vartype learning_rate_scheduler: str - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: str - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: str - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: str - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: str - :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :vartype optimizer: str - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: str - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: str - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: str - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: str - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: str - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. 
- :vartype warmup_cosine_lr_cycles: str - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: str - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: str - :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must - be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype box_detections_per_image: str - :ivar box_score_threshold: During inference, only return proposals with a classification score - greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :vartype box_score_threshold: str - :ivar image_size: Image size for train and validation. Must be a positive integer. - Note: The training run may get into CUDA OOM if the size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype image_size: str - :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype max_size: str - :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype min_size: str - :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. - Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype model_size: str - :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. - Note: training run may get into CUDA OOM if no sufficient GPU memory. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype multi_scale: str - :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be - float in the range [0, 1]. - :vartype nms_iou_threshold: str - :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not - be - None to enable small object detection logic. A string containing two integers in mxn format. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_grid_size: str - :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be float - in the range [0, 1). - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_overlap_ratio: str - :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging - predictions from tiles and image. - Used in validation/ inference. Must be float in the range [0, 1]. - Note: This settings is not supported for the 'yolov5' algorithm. - NMS: Non-maximum suppression. - :vartype tile_predictions_nms_threshold: str - :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be - float in the range [0, 1]. - :vartype validation_iou_threshold: str - :ivar validation_metric_type: Metric computation method to use for validation metrics. Must be - 'none', 'coco', 'voc', or 'coco_voc'. - :vartype validation_metric_type: str + :ivar mode: The current notification mode for this signal. 
Known values are: "Disabled" and + "Enabled". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". + :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType + :ivar metric_thresholds: [Required] Gets or sets the metrics to calculate and the corresponding + thresholds. Required. + :vartype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetricThreshold] + :ivar production_data: Gets or sets the target data for computing metrics. + :vartype production_data: + list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] + :ivar sampling_rate: [Required] The sample rate of the target data, should be greater than 0 + and at most 1. Required. + :vartype sampling_rate: float + :ivar workspace_connection_id: Gets or sets the workspace connection ID used to connect to the + content generation endpoint. + :vartype workspace_connection_id: str """ + _validation = { + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "sampling_rate": {"required": True}, + } + _attribute_map = { - "ams_gradient": {"key": "amsGradient", "type": "str"}, - "augmentations": {"key": "augmentations", "type": "str"}, - "beta1": {"key": "beta1", "type": "str"}, - "beta2": {"key": "beta2", "type": "str"}, - "distributed": {"key": "distributed", "type": "str"}, - "early_stopping": {"key": "earlyStopping", "type": "str"}, - "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, - "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, - "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, - "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, - "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, - "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, - "learning_rate": {"key": "learningRate", "type": "str"}, - "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, - "model_name": {"key": "modelName", "type": "str"}, - "momentum": {"key": "momentum", "type": "str"}, - "nesterov": {"key": "nesterov", "type": "str"}, - "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, - "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, - "optimizer": {"key": "optimizer", "type": "str"}, - "random_seed": {"key": "randomSeed", "type": "str"}, - "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, - "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, - "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, - "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, - "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, - "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, - "weight_decay": {"key": "weightDecay", "type": "str"}, - "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "str"}, - "box_score_threshold": {"key": "boxScoreThreshold", "type": "str"}, - "image_size": {"key": "imageSize", "type": "str"}, - "max_size": 
{"key": "maxSize", "type": "str"}, - "min_size": {"key": "minSize", "type": "str"}, - "model_size": {"key": "modelSize", "type": "str"}, - "multi_scale": {"key": "multiScale", "type": "str"}, - "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "str"}, - "tile_grid_size": {"key": "tileGridSize", "type": "str"}, - "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "str"}, - "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "str"}, - "validation_iou_threshold": {"key": "validationIouThreshold", "type": "str"}, - "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[GenerationSafetyQualityMetricThreshold]"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "sampling_rate": {"key": "samplingRate", "type": "float"}, + "workspace_connection_id": {"key": "workspaceConnectionId", "type": "str"}, } - def __init__( # pylint: disable=too-many-locals + def __init__( self, *, - ams_gradient: Optional[str] = None, - augmentations: Optional[str] = None, - beta1: Optional[str] = None, - beta2: Optional[str] = None, - distributed: Optional[str] = None, - early_stopping: Optional[str] = None, - early_stopping_delay: Optional[str] = None, - early_stopping_patience: Optional[str] = None, - enable_onnx_normalization: Optional[str] = None, - evaluation_frequency: Optional[str] = None, - gradient_accumulation_step: Optional[str] = None, - layers_to_freeze: Optional[str] = None, - learning_rate: Optional[str] = None, - learning_rate_scheduler: Optional[str] = None, - model_name: Optional[str] = None, - momentum: Optional[str] = None, - nesterov: Optional[str] = None, - number_of_epochs: Optional[str] = None, - number_of_workers: Optional[str] = None, - optimizer: Optional[str] = None, - random_seed: Optional[str] = None, - step_lr_gamma: Optional[str] = None, - step_lr_step_size: Optional[str] = None, - training_batch_size: Optional[str] = None, - validation_batch_size: Optional[str] = None, - warmup_cosine_lr_cycles: Optional[str] = None, - warmup_cosine_lr_warmup_epochs: Optional[str] = None, - weight_decay: Optional[str] = None, - box_detections_per_image: Optional[str] = None, - box_score_threshold: Optional[str] = None, - image_size: Optional[str] = None, - max_size: Optional[str] = None, - min_size: Optional[str] = None, - model_size: Optional[str] = None, - multi_scale: Optional[str] = None, - nms_iou_threshold: Optional[str] = None, - tile_grid_size: Optional[str] = None, - tile_overlap_ratio: Optional[str] = None, - tile_predictions_nms_threshold: Optional[str] = None, - validation_iou_threshold: Optional[str] = None, - validation_metric_type: Optional[str] = None, + metric_thresholds: List["_models.GenerationSafetyQualityMetricThreshold"], + sampling_rate: float, + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, + properties: Optional[Dict[str, str]] = None, + production_data: Optional[List["_models.MonitoringInputDataBase"]] = None, + workspace_connection_id: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: str - :keyword augmentations: Settings for using Augmentations. 
- :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: str - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: str - :keyword distributed: Whether to use distributer training. - :paramtype distributed: str - :keyword early_stopping: Enable early stopping logic during training. - :paramtype early_stopping: str - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: str - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: str - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: str - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: str - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: str - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: str - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: str - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :paramtype learning_rate_scheduler: str - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: str - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: str - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: str - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: str - :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :paramtype optimizer: str - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: str - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: str - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. 
- :paramtype step_lr_step_size: str - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: str - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: str - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: str - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: str - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: str - :keyword box_detections_per_image: Maximum number of detections per image, for all classes. - Must be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype box_detections_per_image: str - :keyword box_score_threshold: During inference, only return proposals with a classification - score greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :paramtype box_score_threshold: str - :keyword image_size: Image size for train and validation. Must be a positive integer. - Note: The training run may get into CUDA OOM if the size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :paramtype image_size: str - :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype max_size: str - :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype min_size: str - :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. - Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :paramtype model_size: str - :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. - Note: training run may get into CUDA OOM if no sufficient GPU memory. - Note: This settings is only supported for the 'yolov5' algorithm. - :paramtype multi_scale: str - :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be - float in the range [0, 1]. - :paramtype nms_iou_threshold: str - :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must - not be - None to enable small object detection logic. A string containing two integers in mxn format. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_grid_size: str - :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be - float in the range [0, 1). - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_overlap_ratio: str - :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging - predictions from tiles and image. - Used in validation/ inference. Must be float in the range [0, 1]. 
- Note: This settings is not supported for the 'yolov5' algorithm. - NMS: Non-maximum suppression. - :paramtype tile_predictions_nms_threshold: str - :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must - be float in the range [0, 1]. - :paramtype validation_iou_threshold: str - :keyword validation_metric_type: Metric computation method to use for validation metrics. Must - be 'none', 'coco', 'voc', or 'coco_voc'. - :paramtype validation_metric_type: str + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword metric_thresholds: [Required] Gets or sets the metrics to calculate and the + corresponding thresholds. Required. + :paramtype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetricThreshold] + :keyword production_data: Gets or sets the target data for computing metrics. + :paramtype production_data: + list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] + :keyword sampling_rate: [Required] The sample rate of the target data, should be greater than 0 + and at most 1. Required. + :paramtype sampling_rate: float + :keyword workspace_connection_id: Gets or sets the workspace connection ID used to connect to + the content generation endpoint. + :paramtype workspace_connection_id: str + """ + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "GenerationSafetyQuality" + self.metric_thresholds = metric_thresholds + self.production_data = production_data + self.sampling_rate = sampling_rate + self.workspace_connection_id = workspace_connection_id + + +class GenerationTokenStatisticsMetricThreshold(_serialization.Model): + """Generation token statistics metric threshold definition. + + All required parameters must be populated in order to send to Azure. + + :ivar metric: [Required] Gets or sets the feature attribution metric to calculate. Required. + Known values are: "TotalTokenCount" and "TotalTokenCountPerGroup". + :vartype metric: str or + ~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetric + :ivar threshold: Gets or sets the threshold value. + If null, a default value will be set depending on the selected metric. 
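The GenerationSafetyQualitySignal model introduced above requires metric_thresholds and sampling_rate, with the discriminator set for you. Below is a minimal construction sketch, not taken from this change: the metric name, threshold value, sampling rate and connection id are placeholders, and the GenerationSafetyQualityMetricThreshold and MonitoringThreshold keyword names are assumed from the generated models rather than shown in this hunk.

    from azure.mgmt.machinelearningservices import models as ml_models

    # Hypothetical values throughout; the metric string is a placeholder because the
    # GenerationSafetyQualityMetric enum values are not listed in this hunk.
    safety_signal = ml_models.GenerationSafetyQualitySignal(
        metric_thresholds=[
            ml_models.GenerationSafetyQualityMetricThreshold(  # metric/threshold kwargs assumed
                metric="<GenerationSafetyQualityMetric value>",
                threshold=ml_models.MonitoringThreshold(value=0.7),  # 'value' kwarg assumed
            )
        ],
        sampling_rate=0.1,  # must be > 0 and <= 1
        mode="Enabled",     # MonitoringNotificationMode: "Enabled" or "Disabled"
        workspace_connection_id="<workspace connection resource id>",
    )
    # signal_type is fixed to "GenerationSafetyQuality" by the generated __init__.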
+ :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + + _validation = { + "metric": {"required": True}, + } + + _attribute_map = { + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + } + + def __init__( + self, + *, + metric: Union[str, "_models.GenerationTokenStatisticsMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ - super().__init__( - ams_gradient=ams_gradient, - augmentations=augmentations, - beta1=beta1, - beta2=beta2, - distributed=distributed, - early_stopping=early_stopping, - early_stopping_delay=early_stopping_delay, - early_stopping_patience=early_stopping_patience, - enable_onnx_normalization=enable_onnx_normalization, - evaluation_frequency=evaluation_frequency, - gradient_accumulation_step=gradient_accumulation_step, - layers_to_freeze=layers_to_freeze, - learning_rate=learning_rate, - learning_rate_scheduler=learning_rate_scheduler, - model_name=model_name, - momentum=momentum, - nesterov=nesterov, - number_of_epochs=number_of_epochs, - number_of_workers=number_of_workers, - optimizer=optimizer, - random_seed=random_seed, - step_lr_gamma=step_lr_gamma, - step_lr_step_size=step_lr_step_size, - training_batch_size=training_batch_size, - validation_batch_size=validation_batch_size, - warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, - warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, - weight_decay=weight_decay, - **kwargs - ) - self.box_detections_per_image = box_detections_per_image - self.box_score_threshold = box_score_threshold - self.image_size = image_size - self.max_size = max_size - self.min_size = min_size - self.model_size = model_size - self.multi_scale = multi_scale - self.nms_iou_threshold = nms_iou_threshold - self.tile_grid_size = tile_grid_size - self.tile_overlap_ratio = tile_overlap_ratio - self.tile_predictions_nms_threshold = tile_predictions_nms_threshold - self.validation_iou_threshold = validation_iou_threshold - self.validation_metric_type = validation_metric_type + :keyword metric: [Required] Gets or sets the feature attribution metric to calculate. Required. + Known values are: "TotalTokenCount" and "TotalTokenCountPerGroup". + :paramtype metric: str or + ~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetric + :keyword threshold: Gets or sets the threshold value. + If null, a default value will be set depending on the selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + """ + super().__init__(**kwargs) + self.metric = metric + self.threshold = threshold -class ImageModelSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes - """Settings used for training the model. - For more information on the available settings please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. +class GenerationTokenStatisticsSignal(MonitoringSignalBase): + """Generation token statistics signal definition. - :ivar advanced_settings: Settings for advanced scenarios. - :vartype advanced_settings: str - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: bool - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. 
- :vartype beta1: float - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: float - :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. - :vartype checkpoint_frequency: int - :ivar checkpoint_model: The pretrained checkpoint model for incremental training. - :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :vartype checkpoint_run_id: str - :ivar distributed: Whether to use distributed training. - :vartype distributed: bool - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: bool - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: int - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: int - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: bool - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: int - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: int - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: int - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: float - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Known values are: "None", "WarmupCosine", and "Step". - :vartype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: float - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: bool - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: int - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". 
- :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: int - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: float - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: int - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: int - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: int - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: float - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: int - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: float + All required parameters must be populated in order to send to Azure. + + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". + :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType + :ivar metric_thresholds: [Required] Gets or sets the metrics to calculate and the corresponding + thresholds. Required. + :vartype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetricThreshold] + :ivar production_data: Gets or sets the target data for computing metrics. + :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + :ivar sampling_rate: [Required] The sample rate of the target data, should be greater than 0 + and at most 1. Required. 
+ :vartype sampling_rate: float """ + _validation = { + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "sampling_rate": {"required": True}, + } + _attribute_map = { - "advanced_settings": {"key": "advancedSettings", "type": "str"}, - "ams_gradient": {"key": "amsGradient", "type": "bool"}, - "augmentations": {"key": "augmentations", "type": "str"}, - "beta1": {"key": "beta1", "type": "float"}, - "beta2": {"key": "beta2", "type": "float"}, - "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, - "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, - "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, - "distributed": {"key": "distributed", "type": "bool"}, - "early_stopping": {"key": "earlyStopping", "type": "bool"}, - "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, - "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, - "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, - "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, - "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, - "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, - "learning_rate": {"key": "learningRate", "type": "float"}, - "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, - "model_name": {"key": "modelName", "type": "str"}, - "momentum": {"key": "momentum", "type": "float"}, - "nesterov": {"key": "nesterov", "type": "bool"}, - "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, - "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, - "optimizer": {"key": "optimizer", "type": "str"}, - "random_seed": {"key": "randomSeed", "type": "int"}, - "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, - "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, - "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, - "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, - "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, - "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, - "weight_decay": {"key": "weightDecay", "type": "float"}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[GenerationTokenStatisticsMetricThreshold]"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "sampling_rate": {"key": "samplingRate", "type": "float"}, } - def __init__( # pylint: disable=too-many-locals + def __init__( self, *, - advanced_settings: Optional[str] = None, - ams_gradient: Optional[bool] = None, - augmentations: Optional[str] = None, - beta1: Optional[float] = None, - beta2: Optional[float] = None, - checkpoint_frequency: Optional[int] = None, - checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, - checkpoint_run_id: Optional[str] = None, - distributed: Optional[bool] = None, - early_stopping: Optional[bool] = None, - early_stopping_delay: Optional[int] = None, - early_stopping_patience: Optional[int] = None, - enable_onnx_normalization: Optional[bool] = None, - evaluation_frequency: Optional[int] = None, - gradient_accumulation_step: Optional[int] = None, - layers_to_freeze: Optional[int] = None, - learning_rate: Optional[float] = None, - learning_rate_scheduler: 
Optional[Union[str, "_models.LearningRateScheduler"]] = None, - model_name: Optional[str] = None, - momentum: Optional[float] = None, - nesterov: Optional[bool] = None, - number_of_epochs: Optional[int] = None, - number_of_workers: Optional[int] = None, - optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, - random_seed: Optional[int] = None, - step_lr_gamma: Optional[float] = None, - step_lr_step_size: Optional[int] = None, - training_batch_size: Optional[int] = None, - validation_batch_size: Optional[int] = None, - warmup_cosine_lr_cycles: Optional[float] = None, - warmup_cosine_lr_warmup_epochs: Optional[int] = None, - weight_decay: Optional[float] = None, + metric_thresholds: List["_models.GenerationTokenStatisticsMetricThreshold"], + sampling_rate: float, + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, + properties: Optional[Dict[str, str]] = None, + production_data: Optional["_models.MonitoringInputDataBase"] = None, **kwargs: Any ) -> None: """ - :keyword advanced_settings: Settings for advanced scenarios. - :paramtype advanced_settings: str - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: bool - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: float - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: float - :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive - integer. - :paramtype checkpoint_frequency: int - :keyword checkpoint_model: The pretrained checkpoint model for incremental training. - :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :paramtype checkpoint_run_id: str - :keyword distributed: Whether to use distributed training. - :paramtype distributed: bool - :keyword early_stopping: Enable early stopping logic during training. - :paramtype early_stopping: bool - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: int - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: int - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: bool - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: int - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: int - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. 
For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: int - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: float - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Known values are: "None", "WarmupCosine", and "Step". - :paramtype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: float - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: bool - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: int - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: int - :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". - :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: int - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: float - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: int - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: int - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: int - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: float - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: int - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. 
- :paramtype weight_decay: float - """ - super().__init__(**kwargs) - self.advanced_settings = advanced_settings - self.ams_gradient = ams_gradient - self.augmentations = augmentations - self.beta1 = beta1 - self.beta2 = beta2 - self.checkpoint_frequency = checkpoint_frequency - self.checkpoint_model = checkpoint_model - self.checkpoint_run_id = checkpoint_run_id - self.distributed = distributed - self.early_stopping = early_stopping - self.early_stopping_delay = early_stopping_delay - self.early_stopping_patience = early_stopping_patience - self.enable_onnx_normalization = enable_onnx_normalization - self.evaluation_frequency = evaluation_frequency - self.gradient_accumulation_step = gradient_accumulation_step - self.layers_to_freeze = layers_to_freeze - self.learning_rate = learning_rate - self.learning_rate_scheduler = learning_rate_scheduler - self.model_name = model_name - self.momentum = momentum - self.nesterov = nesterov - self.number_of_epochs = number_of_epochs - self.number_of_workers = number_of_workers - self.optimizer = optimizer - self.random_seed = random_seed - self.step_lr_gamma = step_lr_gamma - self.step_lr_step_size = step_lr_step_size - self.training_batch_size = training_batch_size - self.validation_batch_size = validation_batch_size - self.warmup_cosine_lr_cycles = warmup_cosine_lr_cycles - self.warmup_cosine_lr_warmup_epochs = warmup_cosine_lr_warmup_epochs - self.weight_decay = weight_decay + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword metric_thresholds: [Required] Gets or sets the metrics to calculate and the + corresponding thresholds. Required. + :paramtype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetricThreshold] + :keyword production_data: Gets or sets the target data for computing metrics. + :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + :keyword sampling_rate: [Required] The sample rate of the target data, should be greater than 0 + and at most 1. Required. + :paramtype sampling_rate: float + """ + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "GenerationTokenStatistics" + self.metric_thresholds = metric_thresholds + self.production_data = production_data + self.sampling_rate = sampling_rate -class ImageModelSettingsClassification(ImageModelSettings): # pylint: disable=too-many-instance-attributes - """Settings used for training the model. - For more information on the available settings please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. +class GridSamplingAlgorithm(SamplingAlgorithm): + """Defines a Sampling Algorithm that exhaustively generates every value combination in the space. - :ivar advanced_settings: Settings for advanced scenarios. - :vartype advanced_settings: str - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: bool + All required parameters must be populated in order to send to Azure. + + :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter + values, along with configuration properties. Required. 
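For the token-statistics signal added above, a similar sketch follows. The threshold value and sampling rate are illustrative, and the MonitoringThreshold 'value' keyword is assumed; the metric names come from the enum values quoted in the docstring.

    from azure.mgmt.machinelearningservices import models as ml_models

    token_signal = ml_models.GenerationTokenStatisticsSignal(
        metric_thresholds=[
            ml_models.GenerationTokenStatisticsMetricThreshold(
                metric="TotalTokenCount",  # or "TotalTokenCountPerGroup"
                threshold=ml_models.MonitoringThreshold(value=100000),  # 'value' kwarg assumed
            )
        ],
        sampling_rate=0.05,  # > 0 and <= 1
    )
    # Note the asymmetry with the safety/quality signal: production_data here is a
    # single MonitoringInputDataBase rather than a list.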
Known values are: "Grid", "Random", and + "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + "sampling_algorithm_type": {"required": True}, + } + + _attribute_map = { + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.sampling_algorithm_type: str = "Grid" + + +class GroupStatus(_serialization.Model): + """GroupStatus. + + :ivar actual_capacity_info: Gets or sets the actual capacity info for the group. + :vartype actual_capacity_info: ~azure.mgmt.machinelearningservices.models.ActualCapacityInfo + :ivar bonus_extra_capacity: Gets or sets capacity used from the pool's reserved capacity. + :vartype bonus_extra_capacity: int + :ivar endpoint_count: Gets or sets the actual number of endpoints in the group. + :vartype endpoint_count: int + :ivar event_log: Gets or sets event log for inference group. + :vartype event_log: list[JSON] + :ivar requested_capacity: Gets or sets the request number of instances for the group. + :vartype requested_capacity: int + """ + + _attribute_map = { + "actual_capacity_info": {"key": "actualCapacityInfo", "type": "ActualCapacityInfo"}, + "bonus_extra_capacity": {"key": "bonusExtraCapacity", "type": "int"}, + "endpoint_count": {"key": "endpointCount", "type": "int"}, + "event_log": {"key": "eventLog", "type": "[object]"}, + "requested_capacity": {"key": "requestedCapacity", "type": "int"}, + } + + def __init__( + self, + *, + actual_capacity_info: Optional["_models.ActualCapacityInfo"] = None, + bonus_extra_capacity: int = 0, + endpoint_count: int = 0, + event_log: Optional[List[JSON]] = None, + requested_capacity: int = 0, + **kwargs: Any + ) -> None: + """ + :keyword actual_capacity_info: Gets or sets the actual capacity info for the group. + :paramtype actual_capacity_info: ~azure.mgmt.machinelearningservices.models.ActualCapacityInfo + :keyword bonus_extra_capacity: Gets or sets capacity used from the pool's reserved capacity. + :paramtype bonus_extra_capacity: int + :keyword endpoint_count: Gets or sets the actual number of endpoints in the group. + :paramtype endpoint_count: int + :keyword event_log: Gets or sets event log for inference group. + :paramtype event_log: list[JSON] + :keyword requested_capacity: Gets or sets the request number of instances for the group. + :paramtype requested_capacity: int + """ + super().__init__(**kwargs) + self.actual_capacity_info = actual_capacity_info + self.bonus_extra_capacity = bonus_extra_capacity + self.endpoint_count = endpoint_count + self.event_log = event_log + self.requested_capacity = requested_capacity + + +class HdfsDatastore(DatastoreProperties): + """HdfsDatastore. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: [Required] Account credentials. Required. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: [Required] Storage type backing the datastore. Required. 
Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar intellectual_property: Intellectual Property details. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded + string. Required if "Https" protocol is selected. + :vartype hdfs_server_certificate: str + :ivar name_node_address: [Required] IP Address or DNS HostName. Required. + :vartype name_node_address: str + :ivar protocol: Protocol used to communicate with the storage account (Https/Http). + :vartype protocol: str + """ + + _validation = { + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "name_node_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "hdfs_server_certificate": {"key": "hdfsServerCertificate", "type": "str"}, + "name_node_address": {"key": "nameNodeAddress", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + name_node_address: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + hdfs_server_certificate: Optional[str] = None, + protocol: str = "http", + **kwargs: Any + ) -> None: + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: [Required] Account credentials. Required. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword intellectual_property: Intellectual Property details. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded + string. Required if "Https" protocol is selected. + :paramtype hdfs_server_certificate: str + :keyword name_node_address: [Required] IP Address or DNS HostName. Required. + :paramtype name_node_address: str + :keyword protocol: Protocol used to communicate with the storage account (Https/Http). 
+ :paramtype protocol: str + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) + self.datastore_type: str = "Hdfs" + self.hdfs_server_certificate = hdfs_server_certificate + self.name_node_address = name_node_address + self.protocol = protocol + + +class HDInsightSchema(_serialization.Model): + """HDInsightSchema. + + :ivar properties: HDInsight compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "HDInsightProperties"}, + } + + def __init__(self, *, properties: Optional["_models.HDInsightProperties"] = None, **kwargs: Any) -> None: + """ + :keyword properties: HDInsight compute properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class HDInsight(Compute, HDInsightSchema): # pylint: disable=too-many-instance-attributes + """A HDInsight compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: HDInsight compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
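A construction sketch for the new HdfsDatastore model. The address and certificate are placeholders; the credentials object must be a concrete DatastoreCredentials subclass, and since this hunk does not show which subclasses apply to HDFS, the class used below is an assumption.

    from azure.mgmt.machinelearningservices import models as ml_models

    hdfs = ml_models.HdfsDatastore(
        credentials=ml_models.NoneDatastoreCredentials(),      # assumption, see note above
        name_node_address="namenode.contoso.internal",          # placeholder host name
        protocol="https",
        hdfs_server_certificate="<base64-encoded TLS cert>",    # required when "Https" is used
        description="HDFS datastore registered through the preview API",
    )
    # datastore_type is fixed to "Hdfs"; is_default is read-only and server-populated.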
+ :vartype disable_local_auth: bool + """ + + _validation = { + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + } + + _attribute_map = { + "properties": {"key": "properties", "type": "HDInsightProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + } + + def __init__( + self, + *, + properties: Optional["_models.HDInsightProperties"] = None, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword properties: HDInsight compute properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + :keyword compute_location: Location for the underlying compute. + :paramtype compute_location: str + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) + self.properties = properties + self.compute_type: str = "HDInsight" + self.compute_location = compute_location + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth + + +class HDInsightProperties(_serialization.Model): + """HDInsight compute properties. + + :ivar ssh_port: Port open for ssh connections on the master node of the cluster. + :vartype ssh_port: int + :ivar address: Public IP address of the master node of the cluster. + :vartype address: str + :ivar administrator_account: Admin credentials for master node of the cluster. + :vartype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + + _attribute_map = { + "ssh_port": {"key": "sshPort", "type": "int"}, + "address": {"key": "address", "type": "str"}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + } + + def __init__( + self, + *, + ssh_port: Optional[int] = None, + address: Optional[str] = None, + administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, + **kwargs: Any + ) -> None: + """ + :keyword ssh_port: Port open for ssh connections on the master node of the cluster. 
+ :paramtype ssh_port: int + :keyword address: Public IP address of the master node of the cluster. + :paramtype address: str + :keyword administrator_account: Admin credentials for master node of the cluster. + :paramtype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + super().__init__(**kwargs) + self.ssh_port = ssh_port + self.address = address + self.administrator_account = administrator_account + + +class IdAssetReference(AssetReferenceBase): + """Reference to an asset via its ARM resource ID. + + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". + :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType + :ivar asset_id: [Required] ARM resource ID of the asset. Required. + :vartype asset_id: str + """ + + _validation = { + "reference_type": {"required": True}, + "asset_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "reference_type": {"key": "referenceType", "type": "str"}, + "asset_id": {"key": "assetId", "type": "str"}, + } + + def __init__(self, *, asset_id: str, **kwargs: Any) -> None: + """ + :keyword asset_id: [Required] ARM resource ID of the asset. Required. + :paramtype asset_id: str + """ + super().__init__(**kwargs) + self.reference_type: str = "Id" + self.asset_id = asset_id + + +class IdentityForCmk(_serialization.Model): + """Identity object used for encryption. + + :ivar user_assigned_identity: UserAssignedIdentity to be used to fetch the encryption key from + keyVault. + :vartype user_assigned_identity: str + """ + + _attribute_map = { + "user_assigned_identity": {"key": "userAssignedIdentity", "type": "str"}, + } + + def __init__(self, *, user_assigned_identity: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword user_assigned_identity: UserAssignedIdentity to be used to fetch the encryption key + from keyVault. + :paramtype user_assigned_identity: str + """ + super().__init__(**kwargs) + self.user_assigned_identity = user_assigned_identity + + +class IdleShutdownSetting(_serialization.Model): + """Stops compute instance after user defined period of inactivity. + + :ivar idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, maximum + is 3 days. + :vartype idle_time_before_shutdown: str + """ + + _attribute_map = { + "idle_time_before_shutdown": {"key": "idleTimeBeforeShutdown", "type": "str"}, + } + + def __init__(self, *, idle_time_before_shutdown: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, + maximum is 3 days. + :paramtype idle_time_before_shutdown: str + """ + super().__init__(**kwargs) + self.idle_time_before_shutdown = idle_time_before_shutdown + + +class Image(_serialization.Model): + """Image. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Type of the image. Possible values are: docker - For docker images. azureml - For + AzureML images. Known values are: "docker" and "azureml". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.ImageType + :ivar reference: Image reference URL. 
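The HDInsight/HDInsightSchema/HDInsightProperties trio above is the usual schema-plus-compute split for compute targets. A sketch of attaching an existing cluster follows; the resource id, address and credentials are placeholders, and the VirtualMachineSshCredentials keyword names are assumed from the generated models.

    from azure.mgmt.machinelearningservices import models as ml_models

    hdi = ml_models.HDInsight(
        resource_id=(
            "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
            "Microsoft.HDInsight/clusters/<cluster>"
        ),
        properties=ml_models.HDInsightProperties(
            ssh_port=22,
            address="10.0.0.4",
            administrator_account=ml_models.VirtualMachineSshCredentials(
                username="sshuser", password="<secret>"   # keyword names assumed
            ),
        ),
        disable_local_auth=True,
    )
    # compute_type is set to "HDInsight" by __init__; provisioning_state, created_on,
    # modified_on and provisioning_errors are read-only and populated by the service.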
+ :vartype reference: str + """ + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "reference": {"key": "reference", "type": "str"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + type: Union[str, "_models.ImageType"] = "docker", + reference: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword type: Type of the image. Possible values are: docker - For docker images. azureml - + For AzureML images. Known values are: "docker" and "azureml". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ImageType + :keyword reference: Image reference URL. + :paramtype reference: str + """ + super().__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.reference = reference + + +class ImageVertical(_serialization.Model): + """Abstract class for AutoML tasks that train image (computer vision) models - + such as Image Classification / Image Classification Multilabel / Image Object Detection / Image + Instance Segmentation. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + """ + + _validation = { + "limit_settings": {"required": True}, + } + + _attribute_map = { + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + **kwargs: Any + ) -> None: + """ + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. 
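The Image model above defaults its type to "docker" when the keyword is omitted; the reference URLs below are placeholders.

    from azure.mgmt.machinelearningservices import models as ml_models

    img = ml_models.Image(reference="<docker image reference>")
    assert img.type == "docker"                # default applied when 'type' is omitted

    azureml_img = ml_models.Image(type="azureml", reference="<azureml image id>")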
+ :paramtype validation_data_size: float + """ + super().__init__(**kwargs) + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + + +class ImageClassificationBase(ImageVertical): + """ImageClassificationBase. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + """ + + _validation = { + "limit_settings": {"required": True}, + } + + _attribute_map = { + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. 
+ :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + **kwargs + ) + self.model_settings = model_settings + self.search_space = search_space + + +class ImageClassification(ImageClassificationBase, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Image Classification. Multi-class image classification is used when an image is classified with + only a single label + from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' + or a 'duck'. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :ivar primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and + "PrecisionScoreWeighted". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. 
+ :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :keyword primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and + "PrecisionScoreWeighted". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "ImageClassification" + self.training_data = training_data + self.primary_metric = primary_metric + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + + +class ImageClassificationMultilabel( + ImageClassificationBase, AutoMLVertical +): # pylint: disable=too-many-instance-attributes + """Image Classification Multilabel. Multi-label image classification is used when an image could + have one or more labels + from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. 
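# Hedged usage sketch for the ImageClassification vertical defined above. It assumes
# MLTableJobInput accepts the MLTable asset URI via its uri keyword; the URI and the
# limit values are placeholders.
from azure.mgmt.machinelearningservices.models import (
    ImageClassification,
    ImageLimitSettings,
    MLTableJobInput,
)

training_mltable = "azureml://datastores/workspaceblobstore/paths/image-train-mltable/"  # placeholder

image_classification = ImageClassification(
    training_data=MLTableJobInput(uri=training_mltable),
    target_column_name="label",
    limit_settings=ImageLimitSettings(max_trials=20, max_concurrent_trials=4),
    primary_metric="Accuracy",  # one of the documented ClassificationPrimaryMetrics values
)
# task_type is fixed to "ImageClassification" by the constructor.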
+ :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :ivar primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted", and "IOU". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + primary_metric: Optional[Union[str, "_models.ClassificationMultilabelPrimaryMetrics"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. 
+ Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :keyword primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted", and "IOU". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "ImageClassificationMultilabel" + self.training_data = training_data + self.primary_metric = primary_metric + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + + +class ImageObjectDetectionBase(ImageVertical): + """ImageObjectDetectionBase. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. 
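# Hedged sketch for ImageClassificationMultilabel (defined above): an explicit
# validation MLTable is passed, so validation_data_size is not needed. URIs are placeholders.
from azure.mgmt.machinelearningservices.models import (
    ImageClassificationMultilabel,
    ImageLimitSettings,
    MLTableJobInput,
)

train_mltable = "azureml://datastores/workspaceblobstore/paths/multilabel-train-mltable/"  # placeholder
valid_mltable = "azureml://datastores/workspaceblobstore/paths/multilabel-valid-mltable/"  # placeholder

multilabel_task = ImageClassificationMultilabel(
    training_data=MLTableJobInput(uri=train_mltable),
    validation_data=MLTableJobInput(uri=valid_mltable),
    limit_settings=ImageLimitSettings(max_trials=10, max_concurrent_trials=2),
    primary_metric="IOU",  # a documented ClassificationMultilabelPrimaryMetrics value
)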
+ :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + """ + + _validation = { + "limit_settings": {"required": True}, + } + + _attribute_map = { + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + **kwargs + ) + self.model_settings = model_settings + self.search_space = search_space + + +class ImageInstanceSegmentation( + ImageObjectDetectionBase, AutoMLVertical +): # pylint: disable=too-many-instance-attributes + """Image Instance Segmentation. Instance segmentation is used to identify objects in an image at + the pixel level, + drawing a polygon around each object in the image. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. 
Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :ivar primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + primary_metric: Optional[Union[str, "_models.InstanceSegmentationPrimaryMetrics"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. 
Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :keyword primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "ImageInstanceSegmentation" + self.training_data = training_data + self.primary_metric = primary_metric + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + + +class ImageLimitSettings(_serialization.Model): + """Limit settings for the AutoML job. + + :ivar max_concurrent_trials: Maximum number of concurrent AutoML iterations. + :vartype max_concurrent_trials: int + :ivar max_trials: Maximum number of AutoML iterations. + :vartype max_trials: int + :ivar timeout: AutoML job timeout. + :vartype timeout: ~datetime.timedelta + """ + + _attribute_map = { + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + } + + def __init__( + self, *, max_concurrent_trials: int = 1, max_trials: int = 1, timeout: datetime.timedelta = "P7D", **kwargs: Any + ) -> None: + """ + :keyword max_concurrent_trials: Maximum number of concurrent AutoML iterations. 
+ :paramtype max_concurrent_trials: int + :keyword max_trials: Maximum number of AutoML iterations. + :paramtype max_trials: int + :keyword timeout: AutoML job timeout. + :paramtype timeout: ~datetime.timedelta + """ + super().__init__(**kwargs) + self.max_concurrent_trials = max_concurrent_trials + self.max_trials = max_trials + self.timeout = timeout + + +class ImageMetadata(_serialization.Model): + """Returns metadata about the operating system image for this compute instance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar current_image_version: Specifies the current operating system image version this compute + instance is running on. + :vartype current_image_version: str + :ivar latest_image_version: Specifies the latest available operating system image version. + :vartype latest_image_version: str + :ivar is_latest_os_image_version: Specifies whether this compute instance is running on the + latest operating system image. + :vartype is_latest_os_image_version: bool + :ivar os_patching_status: Metadata about the os patching. + :vartype os_patching_status: ~azure.mgmt.machinelearningservices.models.OsPatchingStatus + """ + + _validation = { + "os_patching_status": {"readonly": True}, + } + + _attribute_map = { + "current_image_version": {"key": "currentImageVersion", "type": "str"}, + "latest_image_version": {"key": "latestImageVersion", "type": "str"}, + "is_latest_os_image_version": {"key": "isLatestOsImageVersion", "type": "bool"}, + "os_patching_status": {"key": "osPatchingStatus", "type": "OsPatchingStatus"}, + } + + def __init__( + self, + *, + current_image_version: Optional[str] = None, + latest_image_version: Optional[str] = None, + is_latest_os_image_version: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword current_image_version: Specifies the current operating system image version this + compute instance is running on. + :paramtype current_image_version: str + :keyword latest_image_version: Specifies the latest available operating system image version. + :paramtype latest_image_version: str + :keyword is_latest_os_image_version: Specifies whether this compute instance is running on the + latest operating system image. + :paramtype is_latest_os_image_version: bool + """ + super().__init__(**kwargs) + self.current_image_version = current_image_version + self.latest_image_version = latest_image_version + self.is_latest_os_image_version = is_latest_os_image_version + self.os_patching_status = None + + +class ImageModelDistributionSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Distribution expressions to sweep over values of model settings. + + :code:` + Some examples are: + ``` + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ```` + All distributions can be specified as distribution_name(min, max) or choice(val1, val2, ..., + valn) + where distribution name can be: uniform, quniform, loguniform, etc + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. 
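# Note on ImageLimitSettings above: the generated default for timeout is the ISO-8601
# duration string "P7D" (seven days) even though the parameter is annotated as
# datetime.timedelta. Passing a timedelta, as sketched here, should serialize to the
# same duration format.
import datetime

from azure.mgmt.machinelearningservices.models import ImageLimitSettings

limits = ImageLimitSettings(
    max_trials=10,            # total AutoML iterations
    max_concurrent_trials=2,  # iterations allowed to run in parallel
    timeout=datetime.timedelta(hours=6),
)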
+ :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. 
Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + """ + + _attribute_map = { + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + ams_gradient: Optional[str] = None, + augmentations: Optional[str] = None, + beta1: Optional[str] = None, + beta2: Optional[str] = None, + distributed: Optional[str] = None, + early_stopping: Optional[str] = None, + early_stopping_delay: Optional[str] = None, + early_stopping_patience: Optional[str] = None, + enable_onnx_normalization: Optional[str] = None, + evaluation_frequency: Optional[str] = None, + gradient_accumulation_step: Optional[str] = None, + layers_to_freeze: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + momentum: Optional[str] = None, + nesterov: Optional[str] = None, + number_of_epochs: Optional[str] = None, + number_of_workers: Optional[str] = None, + optimizer: Optional[str] = None, + random_seed: Optional[str] = None, + step_lr_gamma: Optional[str] = None, + step_lr_step_size: Optional[str] = None, + 
training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_cosine_lr_cycles: Optional[str] = None, + warmup_cosine_lr_warmup_epochs: Optional[str] = None, + weight_decay: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: str + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: str + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: str + :keyword distributed: Whether to use distributer training. + :paramtype distributed: str + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: str + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: str + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: str + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: str + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: str + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: str + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: str + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :paramtype learning_rate_scheduler: str + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: str + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: str + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: str + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: str + :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. 
+ :paramtype optimizer: str + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: str + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: str + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: str + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: str + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: str + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: str + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: str + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: str + """ + super().__init__(**kwargs) + self.ams_gradient = ams_gradient + self.augmentations = augmentations + self.beta1 = beta1 + self.beta2 = beta2 + self.distributed = distributed + self.early_stopping = early_stopping + self.early_stopping_delay = early_stopping_delay + self.early_stopping_patience = early_stopping_patience + self.enable_onnx_normalization = enable_onnx_normalization + self.evaluation_frequency = evaluation_frequency + self.gradient_accumulation_step = gradient_accumulation_step + self.layers_to_freeze = layers_to_freeze + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.momentum = momentum + self.nesterov = nesterov + self.number_of_epochs = number_of_epochs + self.number_of_workers = number_of_workers + self.optimizer = optimizer + self.random_seed = random_seed + self.step_lr_gamma = step_lr_gamma + self.step_lr_step_size = step_lr_step_size + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_cosine_lr_cycles = warmup_cosine_lr_cycles + self.warmup_cosine_lr_warmup_epochs = warmup_cosine_lr_warmup_epochs + self.weight_decay = weight_decay + + +class ImageModelDistributionSettingsClassification( + ImageModelDistributionSettings +): # pylint: disable=too-many-instance-attributes + """Distribution expressions to sweep over values of model settings. + + :code:` + Some examples are: + ``` + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ```` + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. 
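# Hedged sketch for ImageModelDistributionSettings above: every field is a string
# distribution expression such as choice(...) or uniform(min, max), as shown in the
# class docstring. The specific values are placeholders.
from azure.mgmt.machinelearningservices.models import ImageModelDistributionSettings

sweep_distributions = ImageModelDistributionSettings(
    model_name="choice('seresnext', 'resnest50')",
    learning_rate="uniform(0.001, 0.01)",
    layers_to_freeze="choice(0, 2)",
    optimizer="choice('sgd', 'adam')",
)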
+ :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. 
+ :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + :ivar training_crop_size: Image crop size that is input to the neural network for the training + dataset. Must be a positive integer. + :vartype training_crop_size: str + :ivar validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :vartype validation_crop_size: str + :ivar validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :vartype validation_resize_size: str + :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. + :vartype weighted_loss: str + """ + + _attribute_map = { + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + "training_crop_size": {"key": "trainingCropSize", "type": "str"}, + "validation_crop_size": {"key": "validationCropSize", "type": "str"}, + "validation_resize_size": {"key": "validationResizeSize", "type": "str"}, + "weighted_loss": {"key": "weightedLoss", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + ams_gradient: Optional[str] = None, + augmentations: Optional[str] = 
None, + beta1: Optional[str] = None, + beta2: Optional[str] = None, + distributed: Optional[str] = None, + early_stopping: Optional[str] = None, + early_stopping_delay: Optional[str] = None, + early_stopping_patience: Optional[str] = None, + enable_onnx_normalization: Optional[str] = None, + evaluation_frequency: Optional[str] = None, + gradient_accumulation_step: Optional[str] = None, + layers_to_freeze: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + momentum: Optional[str] = None, + nesterov: Optional[str] = None, + number_of_epochs: Optional[str] = None, + number_of_workers: Optional[str] = None, + optimizer: Optional[str] = None, + random_seed: Optional[str] = None, + step_lr_gamma: Optional[str] = None, + step_lr_step_size: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_cosine_lr_cycles: Optional[str] = None, + warmup_cosine_lr_warmup_epochs: Optional[str] = None, + weight_decay: Optional[str] = None, + training_crop_size: Optional[str] = None, + validation_crop_size: Optional[str] = None, + validation_resize_size: Optional[str] = None, + weighted_loss: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: str + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: str + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: str + :keyword distributed: Whether to use distributer training. + :paramtype distributed: str + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: str + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: str + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: str + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: str + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: str + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: str + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: str + :keyword learning_rate: Initial learning rate. 
Must be a float in the range [0, 1]. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :paramtype learning_rate_scheduler: str + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: str + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: str + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: str + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: str + :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :paramtype optimizer: str + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: str + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: str + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: str + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: str + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: str + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: str + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: str + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: str + :keyword training_crop_size: Image crop size that is input to the neural network for the + training dataset. Must be a positive integer. + :paramtype training_crop_size: str + :keyword validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :paramtype validation_crop_size: str + :keyword validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :paramtype validation_resize_size: str + :keyword weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. 
+ :paramtype weighted_loss: str + """ + super().__init__( + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) + self.training_crop_size = training_crop_size + self.validation_crop_size = validation_crop_size + self.validation_resize_size = validation_resize_size + self.weighted_loss = weighted_loss + + +class ImageModelDistributionSettingsObjectDetection( + ImageModelDistributionSettings +): # pylint: disable=too-many-instance-attributes + """Distribution expressions to sweep over values of model settings. + + :code:` + Some examples are: + ``` + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ```` + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. 
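# Hedged sketch for ImageModelDistributionSettingsClassification above, adding the
# classification-specific fields; a list of such entries is what the search_space
# parameter of the image classification verticals expects. Values are placeholders.
from azure.mgmt.machinelearningservices.models import ImageModelDistributionSettingsClassification

classification_space_entry = ImageModelDistributionSettingsClassification(
    model_name="choice('seresnext', 'resnest50')",
    learning_rate="uniform(0.001, 0.01)",
    training_crop_size="choice(224, 256)",
    weighted_loss="choice(0, 1)",  # 0 = none, 1 = sqrt(class_weights), 2 = class_weights
)
search_space = [classification_space_entry]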
+ :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must + be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype box_detections_per_image: str + :ivar box_score_threshold: During inference, only return proposals with a classification score + greater than + BoxScoreThreshold. Must be a float in the range[0, 1]. + :vartype box_score_threshold: str + :ivar image_size: Image size for train and validation. Must be a positive integer. 
+ Note: The training run may get into CUDA OOM if the size is too big.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype image_size: str
+ :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone.
+ Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype max_size: str
+ :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone.
+ Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype min_size: str
+ :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'.
+ Note: training run may get into CUDA OOM if the model size is too big.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype model_size: str
+ :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%.
+ Note: training run may get into CUDA OOM if there is insufficient GPU memory.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype multi_scale: str
+ :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a
+ float in the range [0, 1].
+ :vartype nms_iou_threshold: str
+ :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not
+ be
+ None to enable small object detection logic. A string containing two integers in mxn format.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype tile_grid_size: str
+ :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be a
+ float in the range [0, 1).
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype tile_overlap_ratio: str
+ :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging
+ predictions from tiles and image.
+ Used in validation/inference. Must be a float in the range [0, 1].
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ NMS: Non-maximum suppression.
+ :vartype tile_predictions_nms_threshold: str
+ :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be a
+ float in the range [0, 1].
+ :vartype validation_iou_threshold: str
+ :ivar validation_metric_type: Metric computation method to use for validation metrics. Must be
+ 'none', 'coco', 'voc', or 'coco_voc'.
+ :vartype validation_metric_type: str + """ + + _attribute_map = { + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "str"}, + "box_score_threshold": {"key": "boxScoreThreshold", "type": "str"}, + "image_size": {"key": "imageSize", "type": "str"}, + "max_size": {"key": "maxSize", "type": "str"}, + "min_size": {"key": "minSize", "type": "str"}, + "model_size": {"key": "modelSize", "type": "str"}, + "multi_scale": {"key": "multiScale", "type": "str"}, + "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "str"}, + "tile_grid_size": {"key": "tileGridSize", "type": "str"}, + "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "str"}, + "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "str"}, + "validation_iou_threshold": {"key": "validationIouThreshold", "type": "str"}, + "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + ams_gradient: Optional[str] = None, + augmentations: Optional[str] = None, + beta1: Optional[str] = None, + beta2: Optional[str] = None, + distributed: Optional[str] = None, + early_stopping: Optional[str] = None, + early_stopping_delay: Optional[str] = None, + early_stopping_patience: Optional[str] = None, + enable_onnx_normalization: Optional[str] = None, + evaluation_frequency: Optional[str] = None, + gradient_accumulation_step: Optional[str] = None, + layers_to_freeze: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + momentum: Optional[str] = None, + nesterov: Optional[str] = None, + number_of_epochs: Optional[str] = None, + number_of_workers: Optional[str] = 
None, + optimizer: Optional[str] = None, + random_seed: Optional[str] = None, + step_lr_gamma: Optional[str] = None, + step_lr_step_size: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_cosine_lr_cycles: Optional[str] = None, + warmup_cosine_lr_warmup_epochs: Optional[str] = None, + weight_decay: Optional[str] = None, + box_detections_per_image: Optional[str] = None, + box_score_threshold: Optional[str] = None, + image_size: Optional[str] = None, + max_size: Optional[str] = None, + min_size: Optional[str] = None, + model_size: Optional[str] = None, + multi_scale: Optional[str] = None, + nms_iou_threshold: Optional[str] = None, + tile_grid_size: Optional[str] = None, + tile_overlap_ratio: Optional[str] = None, + tile_predictions_nms_threshold: Optional[str] = None, + validation_iou_threshold: Optional[str] = None, + validation_metric_type: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: str + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: str + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: str + :keyword distributed: Whether to use distributer training. + :paramtype distributed: str + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: str + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: str + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: str + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: str + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: str + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: str + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: str + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :paramtype learning_rate_scheduler: str + :keyword model_name: Name of the model to use for training. 
+ For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: str + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: str + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: str + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: str + :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :paramtype optimizer: str + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: str + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: str + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: str + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: str + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: str + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: str + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: str + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: str + :keyword box_detections_per_image: Maximum number of detections per image, for all classes. + Must be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype box_detections_per_image: str + :keyword box_score_threshold: During inference, only return proposals with a classification + score greater than + BoxScoreThreshold. Must be a float in the range[0, 1]. + :paramtype box_score_threshold: str + :keyword image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. + :paramtype image_size: str + :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype max_size: str + :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype min_size: str + :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: training run may get into CUDA OOM if the model size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. 
+ :paramtype model_size: str + :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: training run may get into CUDA OOM if no sufficient GPU memory. + Note: This settings is only supported for the 'yolov5' algorithm. + :paramtype multi_scale: str + :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be + float in the range [0, 1]. + :paramtype nms_iou_threshold: str + :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must + not be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype tile_grid_size: str + :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be + float in the range [0, 1). + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype tile_overlap_ratio: str + :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/ inference. Must be float in the range [0, 1]. + Note: This settings is not supported for the 'yolov5' algorithm. + NMS: Non-maximum suppression. + :paramtype tile_predictions_nms_threshold: str + :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must + be float in the range [0, 1]. + :paramtype validation_iou_threshold: str + :keyword validation_metric_type: Metric computation method to use for validation metrics. Must + be 'none', 'coco', 'voc', or 'coco_voc'. + :paramtype validation_metric_type: str + """ + super().__init__( + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) + self.box_detections_per_image = box_detections_per_image + self.box_score_threshold = box_score_threshold + self.image_size = image_size + self.max_size = max_size + self.min_size = min_size + self.model_size = model_size + self.multi_scale = multi_scale + self.nms_iou_threshold = nms_iou_threshold + self.tile_grid_size = tile_grid_size + self.tile_overlap_ratio = tile_overlap_ratio + self.tile_predictions_nms_threshold = tile_predictions_nms_threshold + self.validation_iou_threshold = validation_iou_threshold + self.validation_metric_type = validation_metric_type + + +class ImageModelSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Settings used for training the model. 
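The distribution-settings classes generated above deliberately type every field as `str`: each value is a sweep expression rather than a concrete number, which is why the docstrings quote examples such as `LearningRate = "uniform(0.001, 0.01)"`. As a rough, hypothetical usage sketch (not part of this diff), an object-detection search space could be assembled as follows; the import path mirrors the `~azure.mgmt.machinelearningservices.models` references in the docstrings, and the specific model names and ranges are illustrative assumptions only:

```
# Hypothetical sketch, not code generated by this PR.
from azure.mgmt.machinelearningservices.models import (
    ImageModelDistributionSettingsObjectDetection,
)

# Every field is a string holding a distribution expression, as documented above.
search_space = ImageModelDistributionSettingsObjectDetection(
    model_name="choice('yolov5', 'fasterrcnn_resnet50_fpn')",  # illustrative model names
    learning_rate="uniform(0.0001, 0.01)",
    optimizer="choice('sgd', 'adam', 'adamw')",
    min_size="choice(600, 800)",  # per the docstring, not supported for 'yolov5'
)
```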
+ For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. 
Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: float + """ + + _attribute_map = { + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", 
"type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + advanced_settings: Optional[str] = None, + ams_gradient: Optional[bool] = None, + augmentations: Optional[str] = None, + beta1: Optional[float] = None, + beta2: Optional[float] = None, + checkpoint_frequency: Optional[int] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, + checkpoint_run_id: Optional[str] = None, + distributed: Optional[bool] = None, + early_stopping: Optional[bool] = None, + early_stopping_delay: Optional[int] = None, + early_stopping_patience: Optional[int] = None, + enable_onnx_normalization: Optional[bool] = None, + evaluation_frequency: Optional[int] = None, + gradient_accumulation_step: Optional[int] = None, + layers_to_freeze: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, + model_name: Optional[str] = None, + momentum: Optional[float] = None, + nesterov: Optional[bool] = None, + number_of_epochs: Optional[int] = None, + number_of_workers: Optional[int] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, + random_seed: Optional[int] = None, + step_lr_gamma: Optional[float] = None, + step_lr_step_size: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_cosine_lr_cycles: Optional[float] = None, + warmup_cosine_lr_warmup_epochs: Optional[int] = None, + weight_decay: Optional[float] = None, + **kwargs: Any + ) -> None: + """ + :keyword advanced_settings: Settings for advanced scenarios. + :paramtype advanced_settings: str + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: bool + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: float + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: float + :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive + integer. + :paramtype checkpoint_frequency: int + :keyword checkpoint_model: The pretrained checkpoint model for incremental training. + :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :paramtype checkpoint_run_id: str + :keyword distributed: Whether to use distributed training. + :paramtype distributed: bool + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: bool + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: int + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. 
+ :paramtype early_stopping_patience: int + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: bool + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: int + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: int + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: int + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: float + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: bool + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: int + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: int + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: int + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: float + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: int + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: int + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: int + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: float + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: int + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. 
Must + be a float in the range[0, 1]. + :paramtype weight_decay: float + """ + super().__init__(**kwargs) + self.advanced_settings = advanced_settings + self.ams_gradient = ams_gradient + self.augmentations = augmentations + self.beta1 = beta1 + self.beta2 = beta2 + self.checkpoint_frequency = checkpoint_frequency + self.checkpoint_model = checkpoint_model + self.checkpoint_run_id = checkpoint_run_id + self.distributed = distributed + self.early_stopping = early_stopping + self.early_stopping_delay = early_stopping_delay + self.early_stopping_patience = early_stopping_patience + self.enable_onnx_normalization = enable_onnx_normalization + self.evaluation_frequency = evaluation_frequency + self.gradient_accumulation_step = gradient_accumulation_step + self.layers_to_freeze = layers_to_freeze + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.momentum = momentum + self.nesterov = nesterov + self.number_of_epochs = number_of_epochs + self.number_of_workers = number_of_workers + self.optimizer = optimizer + self.random_seed = random_seed + self.step_lr_gamma = step_lr_gamma + self.step_lr_step_size = step_lr_step_size + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_cosine_lr_cycles = warmup_cosine_lr_cycles + self.warmup_cosine_lr_warmup_epochs = warmup_cosine_lr_warmup_epochs + self.weight_decay = weight_decay + + +class ImageModelSettingsClassification(ImageModelSettings): # pylint: disable=too-many-instance-attributes + """Settings used for training the model. + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. 
+ :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: float + :ivar training_crop_size: Image crop size that is input to the neural network for the training + dataset. Must be a positive integer. 
+ :vartype training_crop_size: int + :ivar validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :vartype validation_crop_size: int + :ivar validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :vartype validation_resize_size: int + :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. + :vartype weighted_loss: int + """ + + _attribute_map = { + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + "training_crop_size": {"key": "trainingCropSize", "type": "int"}, + "validation_crop_size": {"key": "validationCropSize", "type": "int"}, + "validation_resize_size": {"key": "validationResizeSize", "type": "int"}, + "weighted_loss": {"key": "weightedLoss", "type": "int"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + advanced_settings: Optional[str] = None, + ams_gradient: Optional[bool] = None, + augmentations: Optional[str] = None, + beta1: Optional[float] = None, + beta2: Optional[float] = None, + checkpoint_frequency: Optional[int] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, + checkpoint_run_id: Optional[str] = None, + distributed: Optional[bool] = None, + early_stopping: Optional[bool] = None, + early_stopping_delay: Optional[int] = None, + 
early_stopping_patience: Optional[int] = None, + enable_onnx_normalization: Optional[bool] = None, + evaluation_frequency: Optional[int] = None, + gradient_accumulation_step: Optional[int] = None, + layers_to_freeze: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, + model_name: Optional[str] = None, + momentum: Optional[float] = None, + nesterov: Optional[bool] = None, + number_of_epochs: Optional[int] = None, + number_of_workers: Optional[int] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, + random_seed: Optional[int] = None, + step_lr_gamma: Optional[float] = None, + step_lr_step_size: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_cosine_lr_cycles: Optional[float] = None, + warmup_cosine_lr_warmup_epochs: Optional[int] = None, + weight_decay: Optional[float] = None, + training_crop_size: Optional[int] = None, + validation_crop_size: Optional[int] = None, + validation_resize_size: Optional[int] = None, + weighted_loss: Optional[int] = None, + **kwargs: Any + ) -> None: + """ + :keyword advanced_settings: Settings for advanced scenarios. + :paramtype advanced_settings: str + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: bool + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: float + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: float + :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive + integer. + :paramtype checkpoint_frequency: int + :keyword checkpoint_model: The pretrained checkpoint model for incremental training. + :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :paramtype checkpoint_run_id: str + :keyword distributed: Whether to use distributed training. + :paramtype distributed: bool + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: bool + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: int + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: int + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: bool + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: int + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. 
+ :paramtype gradient_accumulation_step: int + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: int + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: float + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: bool + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: int + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: int + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: int + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: float + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: int + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: int + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: int + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: float + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: int + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: float + :keyword training_crop_size: Image crop size that is input to the neural network for the + training dataset. Must be a positive integer. + :paramtype training_crop_size: int + :keyword validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :paramtype validation_crop_size: int + :keyword validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :paramtype validation_resize_size: int + :keyword weighted_loss: Weighted loss. 
The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. + :paramtype weighted_loss: int + """ + super().__init__( + advanced_settings=advanced_settings, + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + checkpoint_frequency=checkpoint_frequency, + checkpoint_model=checkpoint_model, + checkpoint_run_id=checkpoint_run_id, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) + self.training_crop_size = training_crop_size + self.validation_crop_size = validation_crop_size + self.validation_resize_size = validation_resize_size + self.weighted_loss = weighted_loss + + +class ImageModelSettingsObjectDetection(ImageModelSettings): # pylint: disable=too-many-instance-attributes + """Settings used for training the model. + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool :ivar augmentations: Settings for using Augmentations. :vartype augmentations: str :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range @@ -12330,3439 +17546,6697 @@ class ImageModelSettingsClassification(ImageModelSettings): # pylint: disable=t :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be a float in the range[0, 1]. :vartype weight_decay: float - :ivar training_crop_size: Image crop size that is input to the neural network for the training - dataset. Must be a positive integer. - :vartype training_crop_size: int - :ivar validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :vartype validation_crop_size: int - :ivar validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :vartype validation_resize_size: int - :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. - :vartype weighted_loss: int + :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must + be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. 
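Unlike the distribution classes, the `ImageModelSettings*` models above carry concrete typed values (bools, ints, floats, and enum-backed strings such as `LearningRateScheduler`). As a purely illustrative sketch (not part of this diff), a classification configuration respecting the constraints stated in these docstrings might look like the following; the values shown are assumptions chosen only to satisfy the documented ranges:

```
# Hypothetical sketch, not code generated by this PR.
from azure.mgmt.machinelearningservices.models import ImageModelSettingsClassification

settings = ImageModelSettingsClassification(
    model_name="seresnext",                  # one of the models quoted in the docstring examples
    number_of_epochs=15,                     # must be a positive integer
    learning_rate=0.01,                      # must be a float in [0, 1]
    learning_rate_scheduler="WarmupCosine",  # known values: "None", "WarmupCosine", "Step"
    training_crop_size=224,                  # must be a positive integer
    weighted_loss=1,                         # 0 = none, 1 = sqrt(class_weights), 2 = class_weights
)
```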
+ :vartype box_detections_per_image: int
+ :ivar box_score_threshold: During inference, only return proposals with a classification score
+ greater than
+ BoxScoreThreshold. Must be a float in the range [0, 1].
+ :vartype box_score_threshold: float
+ :ivar image_size: Image size for train and validation. Must be a positive integer.
+ Note: The training run may get into CUDA OOM if the size is too big.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype image_size: int
+ :ivar log_training_metrics: Enable computing and logging training metrics. Known values are:
+ "Enable" and "Disable".
+ :vartype log_training_metrics: str or
+ ~azure.mgmt.machinelearningservices.models.LogTrainingMetrics
+ :ivar log_validation_loss: Enable computing and logging validation loss. Known values are:
+ "Enable" and "Disable".
+ :vartype log_validation_loss: str or
+ ~azure.mgmt.machinelearningservices.models.LogValidationLoss
+ :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone.
+ Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype max_size: int
+ :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone.
+ Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype min_size: int
+ :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'.
+ Note: training run may get into CUDA OOM if the model size is too big.
+ Note: This setting is only supported for the 'yolov5' algorithm. Known values are: "None",
+ "Small", "Medium", "Large", and "ExtraLarge".
+ :vartype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize
+ :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%.
+ Note: training run may get into CUDA OOM if there is insufficient GPU memory.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype multi_scale: bool
+ :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a
+ float in the range [0, 1].
+ :vartype nms_iou_threshold: float
+ :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not
+ be
+ None to enable small object detection logic. A string containing two integers in mxn format.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype tile_grid_size: str
+ :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be a
+ float in the range [0, 1).
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype tile_overlap_ratio: float
+ :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging
+ predictions from tiles and image.
+ Used in validation/inference. Must be a float in the range [0, 1].
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype tile_predictions_nms_threshold: float
+ :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be a
+ float in the range [0, 1].
+ :vartype validation_iou_threshold: float
+ :ivar validation_metric_type: Metric computation method to use for validation metrics. Known
+ values are: "None", "Coco", "Voc", and "CocoVoc".
+ :vartype validation_metric_type: str or + ~azure.mgmt.machinelearningservices.models.ValidationMetricType + """ + + _attribute_map = { + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "int"}, + "box_score_threshold": {"key": "boxScoreThreshold", "type": "float"}, + "image_size": {"key": "imageSize", "type": "int"}, + "log_training_metrics": {"key": "logTrainingMetrics", "type": "str"}, + "log_validation_loss": {"key": "logValidationLoss", "type": "str"}, + "max_size": {"key": "maxSize", "type": "int"}, + "min_size": {"key": "minSize", "type": "int"}, + "model_size": {"key": "modelSize", "type": "str"}, + "multi_scale": {"key": "multiScale", "type": "bool"}, + "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "float"}, + "tile_grid_size": {"key": "tileGridSize", "type": "str"}, + "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "float"}, + "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "float"}, + "validation_iou_threshold": {"key": "validationIouThreshold", "type": "float"}, + "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + advanced_settings: Optional[str] = None, + ams_gradient: Optional[bool] = None, + augmentations: Optional[str] = None, + beta1: Optional[float] = None, + beta2: Optional[float] = None, + checkpoint_frequency: Optional[int] = None, + 
checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, + checkpoint_run_id: Optional[str] = None, + distributed: Optional[bool] = None, + early_stopping: Optional[bool] = None, + early_stopping_delay: Optional[int] = None, + early_stopping_patience: Optional[int] = None, + enable_onnx_normalization: Optional[bool] = None, + evaluation_frequency: Optional[int] = None, + gradient_accumulation_step: Optional[int] = None, + layers_to_freeze: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, + model_name: Optional[str] = None, + momentum: Optional[float] = None, + nesterov: Optional[bool] = None, + number_of_epochs: Optional[int] = None, + number_of_workers: Optional[int] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, + random_seed: Optional[int] = None, + step_lr_gamma: Optional[float] = None, + step_lr_step_size: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_cosine_lr_cycles: Optional[float] = None, + warmup_cosine_lr_warmup_epochs: Optional[int] = None, + weight_decay: Optional[float] = None, + box_detections_per_image: Optional[int] = None, + box_score_threshold: Optional[float] = None, + image_size: Optional[int] = None, + log_training_metrics: Optional[Union[str, "_models.LogTrainingMetrics"]] = None, + log_validation_loss: Optional[Union[str, "_models.LogValidationLoss"]] = None, + max_size: Optional[int] = None, + min_size: Optional[int] = None, + model_size: Optional[Union[str, "_models.ModelSize"]] = None, + multi_scale: Optional[bool] = None, + nms_iou_threshold: Optional[float] = None, + tile_grid_size: Optional[str] = None, + tile_overlap_ratio: Optional[float] = None, + tile_predictions_nms_threshold: Optional[float] = None, + validation_iou_threshold: Optional[float] = None, + validation_metric_type: Optional[Union[str, "_models.ValidationMetricType"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword advanced_settings: Settings for advanced scenarios. + :paramtype advanced_settings: str + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: bool + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: float + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: float + :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive + integer. + :paramtype checkpoint_frequency: int + :keyword checkpoint_model: The pretrained checkpoint model for incremental training. + :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :paramtype checkpoint_run_id: str + :keyword distributed: Whether to use distributed training. + :paramtype distributed: bool + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: bool + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. 
+ :paramtype early_stopping_delay: int + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: int + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: bool + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: int + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: int + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: int + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", and "Step". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: float + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: bool + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: int + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: int + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". + :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: int + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: float + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: int + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: int + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: int + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. 
+ :paramtype warmup_cosine_lr_cycles: float + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: int + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: float + :keyword box_detections_per_image: Maximum number of detections per image, for all classes. + Must be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype box_detections_per_image: int + :keyword box_score_threshold: During inference, only return proposals with a classification + score greater than + BoxScoreThreshold. Must be a float in the range[0, 1]. + :paramtype box_score_threshold: float + :keyword image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. + :paramtype image_size: int + :keyword log_training_metrics: Enable computing and logging training metrics. Known values are: + "Enable" and "Disable". + :paramtype log_training_metrics: str or + ~azure.mgmt.machinelearningservices.models.LogTrainingMetrics + :keyword log_validation_loss: Enable computing and logging validation loss. Known values are: + "Enable" and "Disable". + :paramtype log_validation_loss: str or + ~azure.mgmt.machinelearningservices.models.LogValidationLoss + :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype max_size: int + :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype min_size: int + :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: training run may get into CUDA OOM if the model size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. Known values are: "None", + "Small", "Medium", "Large", and "ExtraLarge". + :paramtype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize + :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: training run may get into CUDA OOM if no sufficient GPU memory. + Note: This settings is only supported for the 'yolov5' algorithm. + :paramtype multi_scale: bool + :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be + a float in the range [0, 1]. + :paramtype nms_iou_threshold: float + :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must + not be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype tile_grid_size: str + :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be + float in the range [0, 1). + Note: This settings is not supported for the 'yolov5' algorithm. 
+ :paramtype tile_overlap_ratio: float + :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/ inference. Must be float in the range [0, 1]. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype tile_predictions_nms_threshold: float + :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must + be float in the range [0, 1]. + :paramtype validation_iou_threshold: float + :keyword validation_metric_type: Metric computation method to use for validation metrics. Known + values are: "None", "Coco", "Voc", and "CocoVoc". + :paramtype validation_metric_type: str or + ~azure.mgmt.machinelearningservices.models.ValidationMetricType + """ + super().__init__( + advanced_settings=advanced_settings, + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + checkpoint_frequency=checkpoint_frequency, + checkpoint_model=checkpoint_model, + checkpoint_run_id=checkpoint_run_id, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) + self.box_detections_per_image = box_detections_per_image + self.box_score_threshold = box_score_threshold + self.image_size = image_size + self.log_training_metrics = log_training_metrics + self.log_validation_loss = log_validation_loss + self.max_size = max_size + self.min_size = min_size + self.model_size = model_size + self.multi_scale = multi_scale + self.nms_iou_threshold = nms_iou_threshold + self.tile_grid_size = tile_grid_size + self.tile_overlap_ratio = tile_overlap_ratio + self.tile_predictions_nms_threshold = tile_predictions_nms_threshold + self.validation_iou_threshold = validation_iou_threshold + self.validation_metric_type = validation_metric_type + + +class ImageObjectDetection(ImageObjectDetectionBase, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Image Object Detection. Object detection is used to identify objects in an image and locate + each object with a + bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. 
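# Illustrative only, not part of the generated diff: a minimal sketch of populating the
# ImageModelSettingsObjectDetection model defined above. All values are placeholders,
# not recommended defaults.
from azure.mgmt.machinelearningservices import models

od_settings = models.ImageModelSettingsObjectDetection(
    model_name="yolov5",            # model family; see the how-to-auto-train-image-models docs
    image_size=640,                 # only honored by the 'yolov5' algorithm
    box_score_threshold=0.3,        # keep inference proposals scoring above 0.3
    nms_iou_threshold=0.5,          # IOU threshold used in NMS post processing
    validation_metric_type="Coco",  # string form of ValidationMetricType
)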
+ :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :ivar primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics + """ + + _validation = { + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, + } + + _attribute_map = { + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + primary_metric: Optional[Union[str, "_models.ObjectDetectionPrimaryMetrics"]] = None, + **kwargs: 
Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :keyword primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics + """ + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "ImageObjectDetection" + self.training_data = training_data + self.primary_metric = primary_metric + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + + +class ImageSweepSettings(_serialization.Model): + """Model sweeping and hyperparameter sweeping related settings. + + All required parameters must be populated in order to send to Azure. + + :ivar early_termination: Type of early termination policy. + :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :ivar sampling_algorithm: [Required] Type of the hyperparameter sampling algorithms. Required. + Known values are: "Grid", "Random", and "Bayesian". 
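# Illustrative only: a rough sketch of the ImageObjectDetection task vertical defined above.
# The MLTable URI is a hypothetical placeholder, and MLTableJobInput taking a `uri` keyword
# is an assumption about a model referenced but not defined in this hunk.
from azure.mgmt.machinelearningservices import models

task = models.ImageObjectDetection(
    training_data=models.MLTableJobInput(uri="azureml://datastores/images/paths/train/"),
    limit_settings=models.ImageLimitSettings(),  # trial/timeout limits can be set here
    validation_data_size=0.2,                    # hold out 20% when no validation data is supplied
    model_settings=models.ImageModelSettingsObjectDetection(model_name="yolov5"),
    primary_metric="MeanAveragePrecision",
)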
+ :vartype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + "sampling_algorithm": {"required": True}, + } + + _attribute_map = { + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, + } + + def __init__( + self, + *, + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + **kwargs: Any + ) -> None: + """ + :keyword early_termination: Type of early termination policy. + :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :keyword sampling_algorithm: [Required] Type of the hyperparameter sampling algorithms. + Required. Known values are: "Grid", "Random", and "Bayesian". + :paramtype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + super().__init__(**kwargs) + self.early_termination = early_termination + self.sampling_algorithm = sampling_algorithm + + +class ImportDataAction(ScheduleActionBase): + """ImportDataAction. + + All required parameters must be populated in order to send to Azure. + + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". + :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType + :ivar data_import_definition: [Required] Defines Schedule action definition details. Required. + :vartype data_import_definition: ~azure.mgmt.machinelearningservices.models.DataImport + """ + + _validation = { + "action_type": {"required": True}, + "data_import_definition": {"required": True}, + } + + _attribute_map = { + "action_type": {"key": "actionType", "type": "str"}, + "data_import_definition": {"key": "dataImportDefinition", "type": "DataImport"}, + } + + def __init__(self, *, data_import_definition: "_models.DataImport", **kwargs: Any) -> None: + """ + :keyword data_import_definition: [Required] Defines Schedule action definition details. + Required. + :paramtype data_import_definition: ~azure.mgmt.machinelearningservices.models.DataImport + """ + super().__init__(**kwargs) + self.action_type: str = "ImportData" + self.data_import_definition = data_import_definition + + +class IndexColumn(_serialization.Model): + """Dto object representing index column. + + :ivar column_name: Specifies the column name. + :vartype column_name: str + :ivar data_type: Specifies the data type. Known values are: "String", "Integer", "Long", + "Float", "Double", "Binary", "Datetime", and "Boolean". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType + """ + + _attribute_map = { + "column_name": {"key": "columnName", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, + } + + def __init__( + self, + *, + column_name: Optional[str] = None, + data_type: Optional[Union[str, "_models.FeatureDataType"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword column_name: Specifies the column name. + :paramtype column_name: str + :keyword data_type: Specifies the data type. Known values are: "String", "Integer", "Long", + "Float", "Double", "Binary", "Datetime", and "Boolean". 
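# Illustrative only: a minimal sketch of the ImageSweepSettings model defined above.
# BanditPolicy is assumed to be one of the concrete EarlyTerminationPolicy models in this
# package; the slack/interval values are placeholders.
from azure.mgmt.machinelearningservices import models

sweep = models.ImageSweepSettings(
    sampling_algorithm="Random",  # "Grid", "Random", or "Bayesian"
    early_termination=models.BanditPolicy(slack_factor=0.1, evaluation_interval=2),
)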
+ :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType + """ + super().__init__(**kwargs) + self.column_name = column_name + self.data_type = data_type + + +class InferenceContainerProperties(_serialization.Model): + """InferenceContainerProperties. + + :ivar liveness_route: The route to check the liveness of the inference server container. + :vartype liveness_route: ~azure.mgmt.machinelearningservices.models.Route + :ivar readiness_route: The route to check the readiness of the inference server container. + :vartype readiness_route: ~azure.mgmt.machinelearningservices.models.Route + :ivar scoring_route: The port to send the scoring requests to, within the inference server + container. + :vartype scoring_route: ~azure.mgmt.machinelearningservices.models.Route + """ + + _attribute_map = { + "liveness_route": {"key": "livenessRoute", "type": "Route"}, + "readiness_route": {"key": "readinessRoute", "type": "Route"}, + "scoring_route": {"key": "scoringRoute", "type": "Route"}, + } + + def __init__( + self, + *, + liveness_route: Optional["_models.Route"] = None, + readiness_route: Optional["_models.Route"] = None, + scoring_route: Optional["_models.Route"] = None, + **kwargs: Any + ) -> None: + """ + :keyword liveness_route: The route to check the liveness of the inference server container. + :paramtype liveness_route: ~azure.mgmt.machinelearningservices.models.Route + :keyword readiness_route: The route to check the readiness of the inference server container. + :paramtype readiness_route: ~azure.mgmt.machinelearningservices.models.Route + :keyword scoring_route: The port to send the scoring requests to, within the inference server + container. + :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route + """ + super().__init__(**kwargs) + self.liveness_route = liveness_route + self.readiness_route = readiness_route + self.scoring_route = scoring_route + + +class PropertiesBase(_serialization.Model): + """Base definition for pool resources. + + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + """ + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + } + + def __init__( + self, *, description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, **kwargs: Any + ) -> None: + """ + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + """ + super().__init__(**kwargs) + self.description = description + self.properties = properties + + +class InferenceEndpoint(PropertiesBase): + """InferenceEndpoint configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar auth_mode: [Required] Authentication mode for the endpoint. Required. 
"AAD" + :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.AuthMode + :ivar endpoint_uri: Endpoint URI for the inference endpoint. + :vartype endpoint_uri: str + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.PoolProvisioningState + """ + + _validation = { + "auth_mode": {"required": True}, + "endpoint_uri": {"readonly": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "auth_mode": {"key": "authMode", "type": "str"}, + "endpoint_uri": {"key": "endpointUri", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + auth_mode: Union[str, "_models.AuthMode"], + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword auth_mode: [Required] Authentication mode for the endpoint. Required. "AAD" + :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.AuthMode + """ + super().__init__(description=description, properties=properties, **kwargs) + self.auth_mode = auth_mode + self.endpoint_uri = None + self.provisioning_state = None + + +class InferenceEndpointMinimalTrackedResource(_serialization.Model): + """InferenceEndpointMinimalTrackedResource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource URL of the entity (not URL encoded). + :vartype id: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar location: [Required]. Required. + :vartype location: str + :ivar name: The name of the resource entity. + :vartype name: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.InferenceEndpoint + :ivar system_data: System data associated with resource provider. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar type: The resource provider and type. 
+ :vartype type: str + """ + + _validation = { + "id": {"readonly": True}, + "location": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "name": {"readonly": True}, + "properties": {"required": True}, + "system_data": {"readonly": True}, + "type": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "kind": {"key": "kind", "type": "str"}, + "location": {"key": "location", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "InferenceEndpoint"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "type": {"key": "type", "type": "str"}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.InferenceEndpoint", + kind: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword location: [Required]. Required. + :paramtype location: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.InferenceEndpoint + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + """ + super().__init__(**kwargs) + self.id = None + self.kind = kind + self.location = location + self.name = None + self.properties = properties + self.system_data = None + self.tags = tags + self.type = None + + +class InferenceEndpointMinimalTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of InferenceEndpoint entities. + + :ivar next_link: The link to the next page of InferenceEndpoint objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type InferenceEndpoint. + :vartype value: + list[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[InferenceEndpointMinimalTrackedResource]"}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.InferenceEndpointMinimalTrackedResource"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword next_link: The link to the next page of InferenceEndpoint objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type InferenceEndpoint. + :paramtype value: + list[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class InferenceGroup(PropertiesBase): + """Inference group configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar bonus_extra_capacity: Capacity to be used from the pool's reserved capacity. + optional. + :vartype bonus_extra_capacity: int + :ivar data: Metadata for the inference group. + :vartype data: str + :ivar priority: Priority of the group within the + N:Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20230801Preview.Pools.InferencePools. 
+ :vartype priority: int + :ivar provisioning_state: Provisioning state for the inference group. Known values are: + "Creating", "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.PoolProvisioningState + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "bonus_extra_capacity": {"key": "bonusExtraCapacity", "type": "int"}, + "data": {"key": "data", "type": "str"}, + "priority": {"key": "priority", "type": "int"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + bonus_extra_capacity: int = 0, + data: Optional[str] = None, + priority: int = 0, + **kwargs: Any + ) -> None: + """ + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword bonus_extra_capacity: Capacity to be used from the pool's reserved capacity. + optional. + :paramtype bonus_extra_capacity: int + :keyword data: Metadata for the inference group. + :paramtype data: str + :keyword priority: Priority of the group within the + N:Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20230801Preview.Pools.InferencePools. + :paramtype priority: int + """ + super().__init__(description=description, properties=properties, **kwargs) + self.bonus_extra_capacity = bonus_extra_capacity + self.data = data + self.priority = priority + self.provisioning_state = None + + +class InferenceGroupMinimalTrackedResourceWithSku(_serialization.Model): + """InferenceGroupMinimalTrackedResourceWithSku. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource URL of the entity (not URL encoded). + :vartype id: str + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar location: [Required]. Required. + :vartype location: str + :ivar name: The name of the resource entity. + :vartype name: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.InferenceGroup + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar system_data: System data associated with resource provider. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar type: The resource provider and type. 
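# Illustrative only: a minimal sketch of the InferenceGroup payload defined above. The
# priority and capacity values are placeholders; provisioning_state is read-only.
from azure.mgmt.machinelearningservices import models

group = models.InferenceGroup(
    priority=1,               # ordering of this group within its inference pool
    bonus_extra_capacity=0,   # extra capacity drawn from the pool's reserved capacity
    description="Low-priority experimentation group",
)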
+ :vartype type: str + """ + + _validation = { + "id": {"readonly": True}, + "location": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "name": {"readonly": True}, + "properties": {"required": True}, + "system_data": {"readonly": True}, + "type": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "kind": {"key": "kind", "type": "str"}, + "location": {"key": "location", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "properties": {"key": "properties", "type": "InferenceGroup"}, + "sku": {"key": "sku", "type": "Sku"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "type": {"key": "type", "type": "str"}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.InferenceGroup", + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword location: [Required]. Required. + :paramtype location: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.InferenceGroup + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + """ + super().__init__(**kwargs) + self.id = None + self.kind = kind + self.location = location + self.name = None + self.properties = properties + self.sku = sku + self.system_data = None + self.tags = tags + self.type = None + + +class InferenceGroupMinimalTrackedResourceWithSkuArmPaginatedResult(_serialization.Model): + """A paginated list of InferenceGroup entities. + + :ivar next_link: The link to the next page of InferenceGroup objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type InferenceGroup. + :vartype value: + list[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[InferenceGroupMinimalTrackedResourceWithSku]"}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.InferenceGroupMinimalTrackedResourceWithSku"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword next_link: The link to the next page of InferenceGroup objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type InferenceGroup. + :paramtype value: + list[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class InferencePool(TrackedResource): + """InferencePool. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. 
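# Illustrative only: a sketch of wrapping the group payload in the tracked-resource envelope
# defined above. The region and Sku values are placeholders; Sku is assumed to accept a name
# and capacity as for other ARM resources in this package.
from azure.mgmt.machinelearningservices import models

group_resource = models.InferenceGroupMinimalTrackedResourceWithSku(
    location="eastus",
    properties=models.InferenceGroup(priority=1),
    sku=models.Sku(name="Standard_DS3_v2", capacity=2),
    tags={"env": "dev"},
)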
+ :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.InferencePoolProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "InferencePoolProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.InferencePoolProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.InferencePoolProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class InferencePoolProperties(PropertiesBase): + """Inference pool configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar properties: Property dictionary. 
Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar code_configuration: Code configuration for the inference pool. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar environment_configuration: EnvironmentConfiguration for the inference pool. + :vartype environment_configuration: + ~azure.mgmt.machinelearningservices.models.PoolEnvironmentConfiguration + :ivar model_configuration: ModelConfiguration for the inference pool. + :vartype model_configuration: ~azure.mgmt.machinelearningservices.models.PoolModelConfiguration + :ivar node_sku_type: [Required] Compute instance type. Required. + :vartype node_sku_type: str + :ivar provisioning_state: Provisioning state for the pool. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.PoolProvisioningState + :ivar request_configuration: Request configuration for the inference pool. + :vartype request_configuration: ~azure.mgmt.machinelearningservices.models.RequestConfiguration + """ + + _validation = { + "node_sku_type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "environment_configuration": {"key": "environmentConfiguration", "type": "PoolEnvironmentConfiguration"}, + "model_configuration": {"key": "modelConfiguration", "type": "PoolModelConfiguration"}, + "node_sku_type": {"key": "nodeSkuType", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "request_configuration": {"key": "requestConfiguration", "type": "RequestConfiguration"}, + } + + def __init__( + self, + *, + node_sku_type: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + code_configuration: Optional["_models.CodeConfiguration"] = None, + environment_configuration: Optional["_models.PoolEnvironmentConfiguration"] = None, + model_configuration: Optional["_models.PoolModelConfiguration"] = None, + request_configuration: Optional["_models.RequestConfiguration"] = None, + **kwargs: Any + ) -> None: + """ + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword code_configuration: Code configuration for the inference pool. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword environment_configuration: EnvironmentConfiguration for the inference pool. + :paramtype environment_configuration: + ~azure.mgmt.machinelearningservices.models.PoolEnvironmentConfiguration + :keyword model_configuration: ModelConfiguration for the inference pool. + :paramtype model_configuration: + ~azure.mgmt.machinelearningservices.models.PoolModelConfiguration + :keyword node_sku_type: [Required] Compute instance type. Required. + :paramtype node_sku_type: str + :keyword request_configuration: Request configuration for the inference pool. 
+ :paramtype request_configuration: + ~azure.mgmt.machinelearningservices.models.RequestConfiguration + """ + super().__init__(description=description, properties=properties, **kwargs) + self.code_configuration = code_configuration + self.environment_configuration = environment_configuration + self.model_configuration = model_configuration + self.node_sku_type = node_sku_type + self.provisioning_state = None + self.request_configuration = request_configuration + + +class InferencePoolTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of InferencePool entities. + + :ivar next_link: The link to the next page of InferencePool objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type InferencePool. + :vartype value: list[~azure.mgmt.machinelearningservices.models.InferencePool] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[InferencePool]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.InferencePool"]] = None, **kwargs: Any + ) -> None: + """ + :keyword next_link: The link to the next page of InferencePool objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type InferencePool. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.InferencePool] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class InstanceTypeSchema(_serialization.Model): + """Instance type schema. + + :ivar node_selector: Node Selector. + :vartype node_selector: dict[str, str] + :ivar resources: Resource requests/limits for this instance type. + :vartype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources + """ + + _attribute_map = { + "node_selector": {"key": "nodeSelector", "type": "{str}"}, + "resources": {"key": "resources", "type": "InstanceTypeSchemaResources"}, + } + + def __init__( + self, + *, + node_selector: Optional[Dict[str, str]] = None, + resources: Optional["_models.InstanceTypeSchemaResources"] = None, + **kwargs: Any + ) -> None: + """ + :keyword node_selector: Node Selector. + :paramtype node_selector: dict[str, str] + :keyword resources: Resource requests/limits for this instance type. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources + """ + super().__init__(**kwargs) + self.node_selector = node_selector + self.resources = resources + + +class InstanceTypeSchemaResources(_serialization.Model): + """Resource requests/limits for this instance type. + + :ivar requests: Resource requests for this instance type. + :vartype requests: dict[str, str] + :ivar limits: Resource limits for this instance type. + :vartype limits: dict[str, str] + """ + + _attribute_map = { + "requests": {"key": "requests", "type": "{str}"}, + "limits": {"key": "limits", "type": "{str}"}, + } + + def __init__( + self, *, requests: Optional[Dict[str, str]] = None, limits: Optional[Dict[str, str]] = None, **kwargs: Any + ) -> None: + """ + :keyword requests: Resource requests for this instance type. + :paramtype requests: dict[str, str] + :keyword limits: Resource limits for this instance type. + :paramtype limits: dict[str, str] + """ + super().__init__(**kwargs) + self.requests = requests + self.limits = limits + + +class IntellectualProperty(_serialization.Model): + """Intellectual Property details for a resource. 
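# Illustrative only: a sketch of the InferencePool tracked resource and its properties as
# defined above. node_sku_type is the only required property field; the SKU name, capacity,
# and region are hypothetical placeholders.
from azure.mgmt.machinelearningservices import models

pool = models.InferencePool(
    location="eastus",
    sku=models.Sku(name="Standard_DS3_v2", capacity=2),
    properties=models.InferencePoolProperties(
        node_sku_type="Standard_DS3_v2",  # [Required] compute instance type
        description="Pool shared by the vision team's endpoints",
    ),
)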
+ + All required parameters must be populated in order to send to Azure. + + :ivar protection_level: Protection level of the Intellectual Property. Known values are: "All" + and "None". + :vartype protection_level: str or ~azure.mgmt.machinelearningservices.models.ProtectionLevel + :ivar publisher: [Required] Publisher of the Intellectual Property. Must be the same as + Registry publisher name. Required. + :vartype publisher: str + """ + + _validation = { + "publisher": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "protection_level": {"key": "protectionLevel", "type": "str"}, + "publisher": {"key": "publisher", "type": "str"}, + } + + def __init__( + self, *, publisher: str, protection_level: Optional[Union[str, "_models.ProtectionLevel"]] = None, **kwargs: Any + ) -> None: + """ + :keyword protection_level: Protection level of the Intellectual Property. Known values are: + "All" and "None". + :paramtype protection_level: str or ~azure.mgmt.machinelearningservices.models.ProtectionLevel + :keyword publisher: [Required] Publisher of the Intellectual Property. Must be the same as + Registry publisher name. Required. + :paramtype publisher: str + """ + super().__init__(**kwargs) + self.protection_level = protection_level + self.publisher = publisher + + +class JobBase(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "JobBaseProperties"}, + } + + def __init__(self, *, properties: "_models.JobBaseProperties", **kwargs: Any) -> None: + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class JobBaseResourceArmPaginatedResult(_serialization.Model): + """A paginated list of JobBase entities. + + :ivar next_link: The link to the next page of JobBase objects. If null, there are no additional + pages. + :vartype next_link: str + :ivar value: An array of objects of type JobBase. 
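# Illustrative only: a minimal sketch of the IntellectualProperty model defined above.
# The publisher string must match the registry publisher name and is a placeholder here.
from azure.mgmt.machinelearningservices import models

ip = models.IntellectualProperty(
    publisher="contoso",
    protection_level="All",  # "All" or "None"
)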
+ :vartype value: list[~azure.mgmt.machinelearningservices.models.JobBase] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[JobBase]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.JobBase"]] = None, **kwargs: Any + ) -> None: + """ + :keyword next_link: The link to the next page of JobBase objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type JobBase. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.JobBase] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class JobResourceConfiguration(ResourceConfiguration): + """JobResourceConfiguration. + + :ivar instance_count: Optional number of instances or nodes used by the compute target. + :vartype instance_count: int + :ivar instance_type: Optional type of VM used as supported by the compute target. + :vartype instance_type: str + :ivar locations: Locations where the job can run. + :vartype locations: list[str] + :ivar max_instance_count: Optional max allowed number of instances or nodes to be used by the + compute target. + For use with elastic training, currently supported by PyTorch distribution type only. + :vartype max_instance_count: int + :ivar properties: Additional properties bag. + :vartype properties: dict[str, JSON] + :ivar docker_args: Extra arguments to pass to the Docker run command. This would override any + parameters that have already been set by the system, or in this section. This parameter is only + supported for Azure ML compute types. + :vartype docker_args: str + :ivar shm_size: Size of the docker container's shared memory block. This should be in the + format of (number)(unit) where number as to be greater than 0 and the unit can be one of + b(bytes), k(kilobytes), m(megabytes), or g(gigabytes). + :vartype shm_size: str + """ + + _validation = { + "shm_size": {"pattern": r"\d+[bBkKmMgG]"}, + } + + _attribute_map = { + "instance_count": {"key": "instanceCount", "type": "int"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "locations": {"key": "locations", "type": "[str]"}, + "max_instance_count": {"key": "maxInstanceCount", "type": "int"}, + "properties": {"key": "properties", "type": "{object}"}, + "docker_args": {"key": "dockerArgs", "type": "str"}, + "shm_size": {"key": "shmSize", "type": "str"}, + } + + def __init__( + self, + *, + instance_count: int = 1, + instance_type: Optional[str] = None, + locations: Optional[List[str]] = None, + max_instance_count: Optional[int] = None, + properties: Optional[Dict[str, JSON]] = None, + docker_args: Optional[str] = None, + shm_size: str = "2g", + **kwargs: Any + ) -> None: + """ + :keyword instance_count: Optional number of instances or nodes used by the compute target. + :paramtype instance_count: int + :keyword instance_type: Optional type of VM used as supported by the compute target. + :paramtype instance_type: str + :keyword locations: Locations where the job can run. + :paramtype locations: list[str] + :keyword max_instance_count: Optional max allowed number of instances or nodes to be used by + the compute target. + For use with elastic training, currently supported by PyTorch distribution type only. + :paramtype max_instance_count: int + :keyword properties: Additional properties bag. + :paramtype properties: dict[str, JSON] + :keyword docker_args: Extra arguments to pass to the Docker run command. 
This would override + any parameters that have already been set by the system, or in this section. This parameter is + only supported for Azure ML compute types. + :paramtype docker_args: str + :keyword shm_size: Size of the docker container's shared memory block. This should be in the + format of (number)(unit) where number as to be greater than 0 and the unit can be one of + b(bytes), k(kilobytes), m(megabytes), or g(gigabytes). + :paramtype shm_size: str + """ + super().__init__( + instance_count=instance_count, + instance_type=instance_type, + locations=locations, + max_instance_count=max_instance_count, + properties=properties, + **kwargs + ) + self.docker_args = docker_args + self.shm_size = shm_size + + +class JobScheduleAction(ScheduleActionBase): + """JobScheduleAction. + + All required parameters must be populated in order to send to Azure. + + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". + :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType + :ivar job_definition: [Required] Defines Schedule action definition details. Required. + :vartype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + + _validation = { + "action_type": {"required": True}, + "job_definition": {"required": True}, + } + + _attribute_map = { + "action_type": {"key": "actionType", "type": "str"}, + "job_definition": {"key": "jobDefinition", "type": "JobBaseProperties"}, + } + + def __init__(self, *, job_definition: "_models.JobBaseProperties", **kwargs: Any) -> None: + """ + :keyword job_definition: [Required] Defines Schedule action definition details. Required. + :paramtype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + super().__init__(**kwargs) + self.action_type: str = "CreateJob" + self.job_definition = job_definition + + +class JobService(_serialization.Model): + """Job endpoint definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar endpoint: Url for endpoint. + :vartype endpoint: str + :ivar error_message: Any error in the service. + :vartype error_message: str + :ivar job_service_type: Endpoint type. + :vartype job_service_type: str + :ivar nodes: Nodes that user would like to start the service on. + If Nodes is not set or set to null, the service will only be started on leader node. + :vartype nodes: ~azure.mgmt.machinelearningservices.models.Nodes + :ivar port: Port for endpoint set by user. + :vartype port: int + :ivar properties: Additional properties to set on the endpoint. + :vartype properties: dict[str, str] + :ivar status: Status of endpoint. 
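Reviewer note (not part of the generated diff): a minimal sketch of a JobResourceConfiguration as defined above; the VM SKU and docker arguments are assumptions, and shm_size must match the (number)(unit) pattern described in the docstring.

from azure.mgmt.machinelearningservices import models

# Two-node job on an assumed GPU SKU with an enlarged shared-memory block.
resources = models.JobResourceConfiguration(
    instance_count=2,
    instance_type="Standard_NC6s_v3",
    shm_size="8g",
    docker_args="--ipc=host",
)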
+ :vartype status: str + """ + + _validation = { + "error_message": {"readonly": True}, + "status": {"readonly": True}, + } + + _attribute_map = { + "endpoint": {"key": "endpoint", "type": "str"}, + "error_message": {"key": "errorMessage", "type": "str"}, + "job_service_type": {"key": "jobServiceType", "type": "str"}, + "nodes": {"key": "nodes", "type": "Nodes"}, + "port": {"key": "port", "type": "int"}, + "properties": {"key": "properties", "type": "{str}"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__( + self, + *, + endpoint: Optional[str] = None, + job_service_type: Optional[str] = None, + nodes: Optional["_models.Nodes"] = None, + port: Optional[int] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword endpoint: Url for endpoint. + :paramtype endpoint: str + :keyword job_service_type: Endpoint type. + :paramtype job_service_type: str + :keyword nodes: Nodes that user would like to start the service on. + If Nodes is not set or set to null, the service will only be started on leader node. + :paramtype nodes: ~azure.mgmt.machinelearningservices.models.Nodes + :keyword port: Port for endpoint set by user. + :paramtype port: int + :keyword properties: Additional properties to set on the endpoint. + :paramtype properties: dict[str, str] + """ + super().__init__(**kwargs) + self.endpoint = endpoint + self.error_message = None + self.job_service_type = job_service_type + self.nodes = nodes + self.port = port + self.properties = properties + self.status = None + + +class KerberosCredentials(_serialization.Model): + """KerberosCredentials. + + All required parameters must be populated in order to send to Azure. + + :ivar kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :vartype kerberos_kdc_address: str + :ivar kerberos_principal: [Required] Kerberos Username. Required. + :vartype kerberos_principal: str + :ivar kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :vartype kerberos_realm: str + """ + + _validation = { + "kerberos_kdc_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + } + + def __init__( + self, *, kerberos_kdc_address: str, kerberos_principal: str, kerberos_realm: str, **kwargs: Any + ) -> None: + """ + :keyword kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :paramtype kerberos_kdc_address: str + :keyword kerberos_principal: [Required] Kerberos Username. Required. + :paramtype kerberos_principal: str + :keyword kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :paramtype kerberos_realm: str + """ + super().__init__(**kwargs) + self.kerberos_kdc_address = kerberos_kdc_address + self.kerberos_principal = kerberos_principal + self.kerberos_realm = kerberos_realm + + +class KerberosKeytabCredentials(KerberosCredentials, DatastoreCredentials): + """KerberosKeytabCredentials. + + All required parameters must be populated in order to send to Azure. 
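Reviewer note (not part of the generated diff): a sketch of the JobService model above used as a dictionary of job endpoints; the service-type names and ports are illustrative only, since job_service_type is a free-form string.

from azure.mgmt.machinelearningservices import models

# error_message and status are read-only and populated by the service.
services = {
    "jupyter": models.JobService(job_service_type="Jupyter", port=8888),
    "tensorboard": models.JobService(job_service_type="TensorBoard", port=6006),
}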
+ + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :vartype kerberos_kdc_address: str + :ivar kerberos_principal: [Required] Kerberos Username. Required. + :vartype kerberos_principal: str + :ivar kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :vartype kerberos_realm: str + :ivar secrets: [Required] Keytab secrets. Required. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets + """ + + _validation = { + "credentials_type": {"required": True}, + "kerberos_kdc_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "secrets": {"required": True}, + } + + _attribute_map = { + "credentials_type": {"key": "credentialsType", "type": "str"}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + "secrets": {"key": "secrets", "type": "KerberosKeytabSecrets"}, + } + + def __init__( + self, + *, + kerberos_kdc_address: str, + kerberos_principal: str, + kerberos_realm: str, + secrets: "_models.KerberosKeytabSecrets", + **kwargs: Any + ) -> None: + """ + :keyword kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :paramtype kerberos_kdc_address: str + :keyword kerberos_principal: [Required] Kerberos Username. Required. + :paramtype kerberos_principal: str + :keyword kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :paramtype kerberos_realm: str + :keyword secrets: [Required] Keytab secrets. Required. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets + """ + super().__init__( + kerberos_kdc_address=kerberos_kdc_address, + kerberos_principal=kerberos_principal, + kerberos_realm=kerberos_realm, + **kwargs + ) + self.credentials_type: str = "KerberosKeytab" + self.secrets = secrets + self.kerberos_kdc_address = kerberos_kdc_address + self.kerberos_principal = kerberos_principal + self.kerberos_realm = kerberos_realm + + +class KerberosKeytabSecrets(DatastoreSecrets): + """KerberosKeytabSecrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar kerberos_keytab: Kerberos keytab secret. 
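Reviewer note (not part of the generated diff): a sketch of the KerberosKeytabCredentials model above; every value is a placeholder, and the keytab contents would normally be supplied from a secure source rather than a literal.

from azure.mgmt.machinelearningservices import models

# All four keyword arguments are required; credentials_type is fixed to "KerberosKeytab" by the class.
creds = models.KerberosKeytabCredentials(
    kerberos_kdc_address="kdc.contoso.com",
    kerberos_principal="hdfsuser",
    kerberos_realm="CONTOSO.COM",
    secrets=models.KerberosKeytabSecrets(kerberos_keytab="<keytab-contents>"),
)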
+ :vartype kerberos_keytab: str + """ + + _validation = { + "secrets_type": {"required": True}, + } + + _attribute_map = { + "secrets_type": {"key": "secretsType", "type": "str"}, + "kerberos_keytab": {"key": "kerberosKeytab", "type": "str"}, + } + + def __init__(self, *, kerberos_keytab: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword kerberos_keytab: Kerberos keytab secret. + :paramtype kerberos_keytab: str + """ + super().__init__(**kwargs) + self.secrets_type: str = "KerberosKeytab" + self.kerberos_keytab = kerberos_keytab + + +class KerberosPasswordCredentials(KerberosCredentials, DatastoreCredentials): + """KerberosPasswordCredentials. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :vartype kerberos_kdc_address: str + :ivar kerberos_principal: [Required] Kerberos Username. Required. + :vartype kerberos_principal: str + :ivar kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :vartype kerberos_realm: str + :ivar secrets: [Required] Kerberos password secrets. Required. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets + """ + + _validation = { + "credentials_type": {"required": True}, + "kerberos_kdc_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "secrets": {"required": True}, + } + + _attribute_map = { + "credentials_type": {"key": "credentialsType", "type": "str"}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + "secrets": {"key": "secrets", "type": "KerberosPasswordSecrets"}, + } + + def __init__( + self, + *, + kerberos_kdc_address: str, + kerberos_principal: str, + kerberos_realm: str, + secrets: "_models.KerberosPasswordSecrets", + **kwargs: Any + ) -> None: + """ + :keyword kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. + :paramtype kerberos_kdc_address: str + :keyword kerberos_principal: [Required] Kerberos Username. Required. + :paramtype kerberos_principal: str + :keyword kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. + :paramtype kerberos_realm: str + :keyword secrets: [Required] Kerberos password secrets. Required. 
+ :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets + """ + super().__init__( + kerberos_kdc_address=kerberos_kdc_address, + kerberos_principal=kerberos_principal, + kerberos_realm=kerberos_realm, + **kwargs + ) + self.credentials_type: str = "KerberosPassword" + self.secrets = secrets + self.kerberos_kdc_address = kerberos_kdc_address + self.kerberos_principal = kerberos_principal + self.kerberos_realm = kerberos_realm + + +class KerberosPasswordSecrets(DatastoreSecrets): + """KerberosPasswordSecrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar kerberos_password: Kerberos password secret. + :vartype kerberos_password: str + """ + + _validation = { + "secrets_type": {"required": True}, + } + + _attribute_map = { + "secrets_type": {"key": "secretsType", "type": "str"}, + "kerberos_password": {"key": "kerberosPassword", "type": "str"}, + } + + def __init__(self, *, kerberos_password: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword kerberos_password: Kerberos password secret. + :paramtype kerberos_password: str + """ + super().__init__(**kwargs) + self.secrets_type: str = "KerberosPassword" + self.kerberos_password = kerberos_password + + +class KeyVaultProperties(_serialization.Model): + """Customer Key vault properties. + + All required parameters must be populated in order to send to Azure. + + :ivar identity_client_id: Currently, we support only SystemAssigned MSI. + We need this when we support UserAssignedIdentities. + :vartype identity_client_id: str + :ivar key_identifier: KeyVault key identifier to encrypt the data. Required. + :vartype key_identifier: str + :ivar key_vault_arm_id: KeyVault Arm Id that contains the data encryption key. Required. + :vartype key_vault_arm_id: str + """ + + _validation = { + "key_identifier": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "key_vault_arm_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "identity_client_id": {"key": "identityClientId", "type": "str"}, + "key_identifier": {"key": "keyIdentifier", "type": "str"}, + "key_vault_arm_id": {"key": "keyVaultArmId", "type": "str"}, + } + + def __init__( + self, *, key_identifier: str, key_vault_arm_id: str, identity_client_id: Optional[str] = None, **kwargs: Any + ) -> None: + """ + :keyword identity_client_id: Currently, we support only SystemAssigned MSI. + We need this when we support UserAssignedIdentities. + :paramtype identity_client_id: str + :keyword key_identifier: KeyVault key identifier to encrypt the data. Required. + :paramtype key_identifier: str + :keyword key_vault_arm_id: KeyVault Arm Id that contains the data encryption key. Required. + :paramtype key_vault_arm_id: str + """ + super().__init__(**kwargs) + self.identity_client_id = identity_client_id + self.key_identifier = key_identifier + self.key_vault_arm_id = key_vault_arm_id + + +class KubernetesSchema(_serialization.Model): + """Kubernetes Compute Schema. + + :ivar properties: Properties of Kubernetes. 
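Reviewer note (not part of the generated diff): a sketch of the KeyVaultProperties model above for customer-managed-key encryption; the subscription, vault, and key identifiers are placeholders.

from azure.mgmt.machinelearningservices import models

# key_vault_arm_id and key_identifier are both required.
encryption_key = models.KeyVaultProperties(
    key_vault_arm_id=(
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg"
        "/providers/Microsoft.KeyVault/vaults/my-vault"
    ),
    key_identifier="https://my-vault.vault.azure.net/keys/my-key",
)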
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "KubernetesProperties"}, + } + + def __init__(self, *, properties: Optional["_models.KubernetesProperties"] = None, **kwargs: Any) -> None: + """ + :keyword properties: Properties of Kubernetes. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class Kubernetes(Compute, KubernetesSchema): # pylint: disable=too-many-instance-attributes + """A Machine Learning compute based on Kubernetes Compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: Properties of Kubernetes. + :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + """ + + _validation = { + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, + } + + _attribute_map = { + "properties": {"key": "properties", "type": "KubernetesProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + } + + def __init__( + self, + *, + properties: Optional["_models.KubernetesProperties"] = None, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword properties: Properties of Kubernetes. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + :keyword compute_location: Location for the underlying compute. + :paramtype compute_location: str + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) + self.properties = properties + self.compute_type: str = "Kubernetes" + self.compute_location = compute_location + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth + + +class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): # pylint: disable=too-many-instance-attributes + """OnlineDeploymentProperties. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + KubernetesOnlineDeployment, ManagedOnlineDeployment + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. 
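Reviewer note (not part of the generated diff): a sketch of the Kubernetes compute model above, attaching an existing AKS cluster; the resource ID, namespace, and instance-type name are placeholders, and compute_type is set to "Kubernetes" by the class itself.

from azure.mgmt.machinelearningservices import models

# provisioning_state, created_on, modified_on, provisioning_errors and is_attached_compute are read-only.
compute = models.Kubernetes(
    description="Attached AKS cluster",
    resource_id=(
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg"
        "/providers/Microsoft.ContainerService/managedClusters/my-aks"
    ),
    properties=models.KubernetesProperties(
        namespace="azureml",
        default_instance_type="gpu-large",
    ),
)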
+ :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar app_insights_enabled: If true, enables Application Insights logging. + :vartype app_insights_enabled: bool + :ivar data_collector: The mdc configuration, we disable mdc when it's null. + :vartype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector + :ivar egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". + :vartype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known + values are: "Managed", "Kubernetes", and "AzureMLCompute". + :vartype endpoint_compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :ivar instance_type: Compute instance type. + :vartype instance_type: str + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar model: The URI path to the model. + :vartype model: str + :ivar model_mount_path: The path to mount the model in custom container. + :vartype model_mount_path: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar request_settings: Request settings for the deployment. + :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :ivar scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. 
+ :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + + _validation = { + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "data_collector": {"key": "dataCollector", "type": "DataCollector"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, + } + + _subtype_map = { + "endpoint_compute_type": {"Kubernetes": "KubernetesOnlineDeployment", "Managed": "ManagedOnlineDeployment"} + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + app_insights_enabled: bool = False, + data_collector: Optional["_models.DataCollector"] = None, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, + instance_type: Optional[str] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + model: Optional[str] = None, + model_mount_path: Optional[str] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + **kwargs: Any + ) -> None: + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword app_insights_enabled: If true, enables Application Insights logging. + :paramtype app_insights_enabled: bool + :keyword data_collector: The mdc configuration, we disable mdc when it's null. + :paramtype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector + :keyword egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. 
Default: Enabled. Known values are: "Enabled" and + "Disabled". + :paramtype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :keyword instance_type: Compute instance type. + :paramtype instance_type: str + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword model: The URI path to the model. + :paramtype model: str + :keyword model_mount_path: The path to mount the model in custom container. + :paramtype model_mount_path: str + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword request_settings: Request settings for the deployment. + :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :keyword scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + **kwargs + ) + self.app_insights_enabled = app_insights_enabled + self.data_collector = data_collector + self.egress_public_network_access = egress_public_network_access + self.endpoint_compute_type: Optional[str] = None + self.instance_type = instance_type + self.liveness_probe = liveness_probe + self.model = model + self.model_mount_path = model_mount_path + self.provisioning_state = None + self.readiness_probe = readiness_probe + self.request_settings = request_settings + self.scale_settings = scale_settings + + +class KubernetesOnlineDeployment(OnlineDeploymentProperties): # pylint: disable=too-many-instance-attributes + """Properties specific to a KubernetesOnlineDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar app_insights_enabled: If true, enables Application Insights logging. + :vartype app_insights_enabled: bool + :ivar data_collector: The mdc configuration, we disable mdc when it's null. + :vartype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector + :ivar egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". 
+ :vartype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known + values are: "Managed", "Kubernetes", and "AzureMLCompute". + :vartype endpoint_compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :ivar instance_type: Compute instance type. + :vartype instance_type: str + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar model: The URI path to the model. + :vartype model: str + :ivar model_mount_path: The path to mount the model in custom container. + :vartype model_mount_path: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar request_settings: Request settings for the deployment. + :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :ivar scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :ivar container_resource_requirements: The resource requirements for the container (cpu and + memory). 
+ :vartype container_resource_requirements: + ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + """ + + _validation = { + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "data_collector": {"key": "dataCollector", "type": "DataCollector"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, + "container_resource_requirements": { + "key": "containerResourceRequirements", + "type": "ContainerResourceRequirements", + }, + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + app_insights_enabled: bool = False, + data_collector: Optional["_models.DataCollector"] = None, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, + instance_type: Optional[str] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + model: Optional[str] = None, + model_mount_path: Optional[str] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + container_resource_requirements: Optional["_models.ContainerResourceRequirements"] = None, + **kwargs: Any + ) -> None: + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword app_insights_enabled: If true, enables Application Insights logging. + :paramtype app_insights_enabled: bool + :keyword data_collector: The mdc configuration, we disable mdc when it's null. 
+ :paramtype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector + :keyword egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". + :paramtype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :keyword instance_type: Compute instance type. + :paramtype instance_type: str + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword model: The URI path to the model. + :paramtype model: str + :keyword model_mount_path: The path to mount the model in custom container. + :paramtype model_mount_path: str + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword request_settings: Request settings for the deployment. + :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :keyword scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :keyword container_resource_requirements: The resource requirements for the container (cpu and + memory). + :paramtype container_resource_requirements: + ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + """ + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + app_insights_enabled=app_insights_enabled, + data_collector=data_collector, + egress_public_network_access=egress_public_network_access, + instance_type=instance_type, + liveness_probe=liveness_probe, + model=model, + model_mount_path=model_mount_path, + readiness_probe=readiness_probe, + request_settings=request_settings, + scale_settings=scale_settings, + **kwargs + ) + self.endpoint_compute_type: str = "Kubernetes" + self.container_resource_requirements = container_resource_requirements + + +class KubernetesProperties(_serialization.Model): + """Kubernetes properties. + + :ivar relay_connection_string: Relay connection string. + :vartype relay_connection_string: str + :ivar service_bus_connection_string: ServiceBus connection string. + :vartype service_bus_connection_string: str + :ivar extension_principal_id: Extension principal-id. + :vartype extension_principal_id: str + :ivar extension_instance_release_train: Extension instance release train. + :vartype extension_instance_release_train: str + :ivar vc_name: VC name. + :vartype vc_name: str + :ivar namespace: Compute namespace. + :vartype namespace: str + :ivar default_instance_type: Default instance type. + :vartype default_instance_type: str + :ivar instance_types: Instance Type Schema. 
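Reviewer note (not part of the generated diff): a sketch of the KubernetesOnlineDeployment model above; the model URI and instance-type name are placeholders, and endpoint_compute_type is fixed to "Kubernetes" by the class.

from azure.mgmt.machinelearningservices import models

# egress_public_network_access accepts the known values "Enabled" and "Disabled".
deployment = models.KubernetesOnlineDeployment(
    model="azureml://registries/contoso/models/my-model/versions/1",
    instance_type="gpu-large",
    app_insights_enabled=True,
    egress_public_network_access="Disabled",
)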
+ :vartype instance_types: dict[str, + ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] + """ + + _attribute_map = { + "relay_connection_string": {"key": "relayConnectionString", "type": "str"}, + "service_bus_connection_string": {"key": "serviceBusConnectionString", "type": "str"}, + "extension_principal_id": {"key": "extensionPrincipalId", "type": "str"}, + "extension_instance_release_train": {"key": "extensionInstanceReleaseTrain", "type": "str"}, + "vc_name": {"key": "vcName", "type": "str"}, + "namespace": {"key": "namespace", "type": "str"}, + "default_instance_type": {"key": "defaultInstanceType", "type": "str"}, + "instance_types": {"key": "instanceTypes", "type": "{InstanceTypeSchema}"}, + } + + def __init__( + self, + *, + relay_connection_string: Optional[str] = None, + service_bus_connection_string: Optional[str] = None, + extension_principal_id: Optional[str] = None, + extension_instance_release_train: Optional[str] = None, + vc_name: Optional[str] = None, + namespace: str = "default", + default_instance_type: Optional[str] = None, + instance_types: Optional[Dict[str, "_models.InstanceTypeSchema"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword relay_connection_string: Relay connection string. + :paramtype relay_connection_string: str + :keyword service_bus_connection_string: ServiceBus connection string. + :paramtype service_bus_connection_string: str + :keyword extension_principal_id: Extension principal-id. + :paramtype extension_principal_id: str + :keyword extension_instance_release_train: Extension instance release train. + :paramtype extension_instance_release_train: str + :keyword vc_name: VC name. + :paramtype vc_name: str + :keyword namespace: Compute namespace. + :paramtype namespace: str + :keyword default_instance_type: Default instance type. + :paramtype default_instance_type: str + :keyword instance_types: Instance Type Schema. + :paramtype instance_types: dict[str, + ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] + """ + super().__init__(**kwargs) + self.relay_connection_string = relay_connection_string + self.service_bus_connection_string = service_bus_connection_string + self.extension_principal_id = extension_principal_id + self.extension_instance_release_train = extension_instance_release_train + self.vc_name = vc_name + self.namespace = namespace + self.default_instance_type = default_instance_type + self.instance_types = instance_types + + +class LabelCategory(_serialization.Model): + """Label category definition. + + :ivar classes: Dictionary of label classes in this category. + :vartype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + :ivar display_name: Display name of the label category. + :vartype display_name: str + :ivar multi_select: Indicates whether it is allowed to select multiple classes in this + category. Known values are: "Enabled" and "Disabled". + :vartype multi_select: str or ~azure.mgmt.machinelearningservices.models.MultiSelect + """ + + _attribute_map = { + "classes": {"key": "classes", "type": "{LabelClass}"}, + "display_name": {"key": "displayName", "type": "str"}, + "multi_select": {"key": "multiSelect", "type": "str"}, + } + + def __init__( + self, + *, + classes: Optional[Dict[str, "_models.LabelClass"]] = None, + display_name: Optional[str] = None, + multi_select: Optional[Union[str, "_models.MultiSelect"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword classes: Dictionary of label classes in this category. 
+ :paramtype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + :keyword display_name: Display name of the label category. + :paramtype display_name: str + :keyword multi_select: Indicates whether it is allowed to select multiple classes in this + category. Known values are: "Enabled" and "Disabled". + :paramtype multi_select: str or ~azure.mgmt.machinelearningservices.models.MultiSelect + """ + super().__init__(**kwargs) + self.classes = classes + self.display_name = display_name + self.multi_select = multi_select + + +class LabelClass(_serialization.Model): + """Label class definition. + + :ivar display_name: Display name of the label class. + :vartype display_name: str + :ivar subclasses: Dictionary of subclasses of the label class. + :vartype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + """ + + _attribute_map = { + "display_name": {"key": "displayName", "type": "str"}, + "subclasses": {"key": "subclasses", "type": "{LabelClass}"}, + } + + def __init__( + self, + *, + display_name: Optional[str] = None, + subclasses: Optional[Dict[str, "_models.LabelClass"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword display_name: Display name of the label class. + :paramtype display_name: str + :keyword subclasses: Dictionary of subclasses of the label class. + :paramtype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + """ + super().__init__(**kwargs) + self.display_name = display_name + self.subclasses = subclasses + + +class LabelingDataConfiguration(_serialization.Model): + """Labeling data configuration definition. + + :ivar data_id: Resource Id of the data asset to perform labeling. + :vartype data_id: str + :ivar incremental_data_refresh: Indicates whether to enable incremental data refresh. Known + values are: "Enabled" and "Disabled". + :vartype incremental_data_refresh: str or + ~azure.mgmt.machinelearningservices.models.IncrementalDataRefresh + """ + + _attribute_map = { + "data_id": {"key": "dataId", "type": "str"}, + "incremental_data_refresh": {"key": "incrementalDataRefresh", "type": "str"}, + } + + def __init__( + self, + *, + data_id: Optional[str] = None, + incremental_data_refresh: Optional[Union[str, "_models.IncrementalDataRefresh"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword data_id: Resource Id of the data asset to perform labeling. + :paramtype data_id: str + :keyword incremental_data_refresh: Indicates whether to enable incremental data refresh. Known + values are: "Enabled" and "Disabled". + :paramtype incremental_data_refresh: str or + ~azure.mgmt.machinelearningservices.models.IncrementalDataRefresh + """ + super().__init__(**kwargs) + self.data_id = data_id + self.incremental_data_refresh = incremental_data_refresh + + +class LabelingJob(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
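Reviewer note (not part of the generated diff): a sketch of the LabelCategory and LabelClass models above, defining a single-select category with two classes; the names are illustrative.

from azure.mgmt.machinelearningservices import models

# multi_select accepts the known values "Enabled" and "Disabled".
label_categories = {
    "animal": models.LabelCategory(
        display_name="Animal",
        multi_select="Disabled",
        classes={
            "cat": models.LabelClass(display_name="Cat"),
            "dog": models.LabelClass(display_name="Dog"),
        },
    )
}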
+ :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "LabelingJobProperties"}, + } + + def __init__(self, *, properties: "_models.LabelingJobProperties", **kwargs: Any) -> None: + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class LabelingJobMediaProperties(_serialization.Model): + """Properties of a labeling job. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + LabelingJobImageProperties, LabelingJobTextProperties + + All required parameters must be populated in order to send to Azure. + + :ivar media_type: [Required] Media type of the job. Required. Known values are: "Image" and + "Text". + :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType + """ + + _validation = { + "media_type": {"required": True}, + } + + _attribute_map = { + "media_type": {"key": "mediaType", "type": "str"}, + } + + _subtype_map = {"media_type": {"Image": "LabelingJobImageProperties", "Text": "LabelingJobTextProperties"}} + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.media_type: Optional[str] = None + + +class LabelingJobImageProperties(LabelingJobMediaProperties): + """Properties of a labeling job for image data. + + All required parameters must be populated in order to send to Azure. + + :ivar media_type: [Required] Media type of the job. Required. Known values are: "Image" and + "Text". + :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType + :ivar annotation_type: Annotation type of image labeling job. Known values are: + "Classification", "BoundingBox", and "InstanceSegmentation". + :vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.ImageAnnotationType + """ + + _validation = { + "media_type": {"required": True}, + } + + _attribute_map = { + "media_type": {"key": "mediaType", "type": "str"}, + "annotation_type": {"key": "annotationType", "type": "str"}, + } + + def __init__( + self, *, annotation_type: Optional[Union[str, "_models.ImageAnnotationType"]] = None, **kwargs: Any + ) -> None: + """ + :keyword annotation_type: Annotation type of image labeling job. Known values are: + "Classification", "BoundingBox", and "InstanceSegmentation". + :paramtype annotation_type: str or + ~azure.mgmt.machinelearningservices.models.ImageAnnotationType + """ + super().__init__(**kwargs) + self.media_type: str = "Image" + self.annotation_type = annotation_type + + +class LabelingJobInstructions(_serialization.Model): + """Instructions for labeling job. 
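Reviewer note (not part of the generated diff): a sketch of the LabelingJobImageProperties model above for an image-classification labeling task; media_type is fixed to "Image" by the class.

from azure.mgmt.machinelearningservices import models

# annotation_type accepts the known values "Classification", "BoundingBox" and "InstanceSegmentation".
media_properties = models.LabelingJobImageProperties(annotation_type="Classification")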
+ + :ivar uri: The link to a page with detailed labeling instructions for labelers. + :vartype uri: str + """ + + _attribute_map = { + "uri": {"key": "uri", "type": "str"}, + } + + def __init__(self, *, uri: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword uri: The link to a page with detailed labeling instructions for labelers. + :paramtype uri: str + """ + super().__init__(**kwargs) + self.uri = uri + + +class LabelingJobProperties(JobBaseProperties): # pylint: disable=too-many-instance-attributes + """Labeling job definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar notification_setting: Notification setting for the job. + :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :ivar secrets_configuration: Configuration for secrets to be made available during runtime. + :vartype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar created_date_time: Created time of the job in UTC timezone. + :vartype created_date_time: ~datetime.datetime + :ivar data_configuration: Configuration of data used in the job. + :vartype data_configuration: + ~azure.mgmt.machinelearningservices.models.LabelingDataConfiguration + :ivar job_instructions: Labeling instructions of the job. + :vartype job_instructions: ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions + :ivar label_categories: Label categories of the job. + :vartype label_categories: dict[str, ~azure.mgmt.machinelearningservices.models.LabelCategory] + :ivar labeling_job_media_properties: Media type specific properties in the job. 
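Reviewer note (not part of the generated diff): a sketch of the LabelingJobInstructions and LabelingDataConfiguration models above, which a LabelingJobProperties instance accepts; the URI and data asset ID are placeholders ("..." is not a real path segment).

from azure.mgmt.machinelearningservices import models

# Instructions page shown to labelers, plus the data asset to label with incremental refresh enabled.
instructions = models.LabelingJobInstructions(uri="https://contoso.example/labeling-guide")
data_configuration = models.LabelingDataConfiguration(
    data_id="/subscriptions/.../data/images/versions/1",
    incremental_data_refresh="Enabled",
)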
+ :vartype labeling_job_media_properties: + ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties + :ivar ml_assist_configuration: Configuration of MLAssist feature in the job. + :vartype ml_assist_configuration: + ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration + :ivar progress_metrics: Progress metrics of the job. + :vartype progress_metrics: ~azure.mgmt.machinelearningservices.models.ProgressMetrics + :ivar project_id: Internal id of the job(Previously called project). + :vartype project_id: str + :ivar provisioning_state: Specifies the labeling job provisioning state. Known values are: + "Succeeded", "Failed", "Canceled", and "InProgress". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :ivar status_messages: Status messages of the job. + :vartype status_messages: list[~azure.mgmt.machinelearningservices.models.StatusMessage] + """ + + _validation = { + "job_type": {"required": True}, + "status": {"readonly": True}, + "created_date_time": {"readonly": True}, + "progress_metrics": {"readonly": True}, + "project_id": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "status_messages": {"readonly": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "created_date_time": {"key": "createdDateTime", "type": "iso-8601"}, + "data_configuration": {"key": "dataConfiguration", "type": "LabelingDataConfiguration"}, + "job_instructions": {"key": "jobInstructions", "type": "LabelingJobInstructions"}, + "label_categories": {"key": "labelCategories", "type": "{LabelCategory}"}, + "labeling_job_media_properties": {"key": "labelingJobMediaProperties", "type": "LabelingJobMediaProperties"}, + "ml_assist_configuration": {"key": "mlAssistConfiguration", "type": "MLAssistConfiguration"}, + "progress_metrics": {"key": "progressMetrics", "type": "ProgressMetrics"}, + "project_id": {"key": "projectId", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "status_messages": {"key": "statusMessages", "type": "[StatusMessage]"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, + services: Optional[Dict[str, "_models.JobService"]] = 
None, + data_configuration: Optional["_models.LabelingDataConfiguration"] = None, + job_instructions: Optional["_models.LabelingJobInstructions"] = None, + label_categories: Optional[Dict[str, "_models.LabelCategory"]] = None, + labeling_job_media_properties: Optional["_models.LabelingJobMediaProperties"] = None, + ml_assist_configuration: Optional["_models.MLAssistConfiguration"] = None, + **kwargs: Any + ) -> None: + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword notification_setting: Notification setting for the job. + :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :keyword secrets_configuration: Configuration for secrets to be made available during runtime. + :paramtype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword data_configuration: Configuration of data used in the job. + :paramtype data_configuration: + ~azure.mgmt.machinelearningservices.models.LabelingDataConfiguration + :keyword job_instructions: Labeling instructions of the job. + :paramtype job_instructions: ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions + :keyword label_categories: Label categories of the job. + :paramtype label_categories: dict[str, + ~azure.mgmt.machinelearningservices.models.LabelCategory] + :keyword labeling_job_media_properties: Media type specific properties in the job. + :paramtype labeling_job_media_properties: + ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties + :keyword ml_assist_configuration: Configuration of MLAssist feature in the job. 
+ :paramtype ml_assist_configuration: + ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, + services=services, + **kwargs + ) + self.job_type: str = "Labeling" + self.created_date_time = None + self.data_configuration = data_configuration + self.job_instructions = job_instructions + self.label_categories = label_categories + self.labeling_job_media_properties = labeling_job_media_properties + self.ml_assist_configuration = ml_assist_configuration + self.progress_metrics = None + self.project_id = None + self.provisioning_state = None + self.status_messages = None + + +class LabelingJobResourceArmPaginatedResult(_serialization.Model): + """A paginated list of LabelingJob entities. + + :ivar next_link: The link to the next page of LabelingJob objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type LabelingJob. + :vartype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[LabelingJob]"}, + } + + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.LabelingJob"]] = None, **kwargs: Any + ) -> None: + """ + :keyword next_link: The link to the next page of LabelingJob objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type LabelingJob. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class LabelingJobTextProperties(LabelingJobMediaProperties): + """Properties of a labeling job for text data. + + All required parameters must be populated in order to send to Azure. + + :ivar media_type: [Required] Media type of the job. Required. Known values are: "Image" and + "Text". + :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType + :ivar annotation_type: Annotation type of text labeling job. Known values are: "Classification" + and "NamedEntityRecognition". + :vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.TextAnnotationType + """ + + _validation = { + "media_type": {"required": True}, + } + + _attribute_map = { + "media_type": {"key": "mediaType", "type": "str"}, + "annotation_type": {"key": "annotationType", "type": "str"}, + } + + def __init__( + self, *, annotation_type: Optional[Union[str, "_models.TextAnnotationType"]] = None, **kwargs: Any + ) -> None: + """ + :keyword annotation_type: Annotation type of text labeling job. Known values are: + "Classification" and "NamedEntityRecognition". + :paramtype annotation_type: str or + ~azure.mgmt.machinelearningservices.models.TextAnnotationType + """ + super().__init__(**kwargs) + self.media_type: str = "Text" + self.annotation_type = annotation_type + + +class OneLakeArtifact(_serialization.Model): + """OneLake artifact (data source) configuration. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + LakeHouseArtifact + + All required parameters must be populated in order to send to Azure. + + :ivar artifact_name: [Required] OneLake artifact name. Required. + :vartype artifact_name: str + :ivar artifact_type: [Required] OneLake artifact type. Required. "LakeHouse" + :vartype artifact_type: str or ~azure.mgmt.machinelearningservices.models.OneLakeArtifactType + """ + + _validation = { + "artifact_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "artifact_type": {"required": True}, + } + + _attribute_map = { + "artifact_name": {"key": "artifactName", "type": "str"}, + "artifact_type": {"key": "artifactType", "type": "str"}, + } + + _subtype_map = {"artifact_type": {"LakeHouse": "LakeHouseArtifact"}} + + def __init__(self, *, artifact_name: str, **kwargs: Any) -> None: + """ + :keyword artifact_name: [Required] OneLake artifact name. Required. + :paramtype artifact_name: str + """ + super().__init__(**kwargs) + self.artifact_name = artifact_name + self.artifact_type: Optional[str] = None + + +class LakeHouseArtifact(OneLakeArtifact): + """LakeHouseArtifact. + + All required parameters must be populated in order to send to Azure. + + :ivar artifact_name: [Required] OneLake artifact name. Required. + :vartype artifact_name: str + :ivar artifact_type: [Required] OneLake artifact type. Required. "LakeHouse" + :vartype artifact_type: str or ~azure.mgmt.machinelearningservices.models.OneLakeArtifactType + """ + + _validation = { + "artifact_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "artifact_type": {"required": True}, + } + + _attribute_map = { + "artifact_name": {"key": "artifactName", "type": "str"}, + "artifact_type": {"key": "artifactType", "type": "str"}, + } + + def __init__(self, *, artifact_name: str, **kwargs: Any) -> None: + """ + :keyword artifact_name: [Required] OneLake artifact name. Required. + :paramtype artifact_name: str + """ + super().__init__(artifact_name=artifact_name, **kwargs) + self.artifact_type: str = "LakeHouse" + + +class ListAmlUserFeatureResult(_serialization.Model): + """The List Aml user feature operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML user facing features. + :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlUserFeature] + :ivar next_link: The URI to fetch the next page of AML user features information. Call + ListNext() with this to fetch the next page of AML user features information. + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[AmlUserFeature]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListNotebookKeysResult(_serialization.Model): + """ListNotebookKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar primary_access_key: The primary access key of the Notebook. + :vartype primary_access_key: str + :ivar secondary_access_key: The secondary access key of the Notebook. 
+ :vartype secondary_access_key: str + """ + + _validation = { + "primary_access_key": {"readonly": True}, + "secondary_access_key": {"readonly": True}, + } + + _attribute_map = { + "primary_access_key": {"key": "primaryAccessKey", "type": "str"}, + "secondary_access_key": {"key": "secondaryAccessKey", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.primary_access_key = None + self.secondary_access_key = None + + +class ListStorageAccountKeysResult(_serialization.Model): + """ListStorageAccountKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar user_storage_key: The access key of the storage. + :vartype user_storage_key: str + """ + + _validation = { + "user_storage_key": {"readonly": True}, + } + + _attribute_map = { + "user_storage_key": {"key": "userStorageKey", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.user_storage_key = None + + +class ListUsagesResult(_serialization.Model): + """The List Usages operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML resource usages. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Usage] + :ivar next_link: The URI to fetch the next page of AML resource usage information. Call + ListNext() with this to fetch the next page of AML resource usage information. + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[Usage]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListWorkspaceKeysResult(_serialization.Model): + """ListWorkspaceKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar app_insights_instrumentation_key: The access key of the workspace app insights. + :vartype app_insights_instrumentation_key: str + :ivar container_registry_credentials: + :vartype container_registry_credentials: + ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult + :ivar notebook_access_keys: + :vartype notebook_access_keys: + ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + :ivar user_storage_arm_id: The arm Id key of the workspace storage. + :vartype user_storage_arm_id: str + :ivar user_storage_key: The access key of the workspace storage. 
+ :vartype user_storage_key: str + """ + + _validation = { + "app_insights_instrumentation_key": {"readonly": True}, + "user_storage_arm_id": {"readonly": True}, + "user_storage_key": {"readonly": True}, + } + + _attribute_map = { + "app_insights_instrumentation_key": {"key": "appInsightsInstrumentationKey", "type": "str"}, + "container_registry_credentials": { + "key": "containerRegistryCredentials", + "type": "RegistryListCredentialsResult", + }, + "notebook_access_keys": {"key": "notebookAccessKeys", "type": "ListNotebookKeysResult"}, + "user_storage_arm_id": {"key": "userStorageArmId", "type": "str"}, + "user_storage_key": {"key": "userStorageKey", "type": "str"}, + } + + def __init__( + self, + *, + container_registry_credentials: Optional["_models.RegistryListCredentialsResult"] = None, + notebook_access_keys: Optional["_models.ListNotebookKeysResult"] = None, + **kwargs: Any + ) -> None: + """ + :keyword container_registry_credentials: + :paramtype container_registry_credentials: + ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult + :keyword notebook_access_keys: + :paramtype notebook_access_keys: + ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + """ + super().__init__(**kwargs) + self.app_insights_instrumentation_key = None + self.container_registry_credentials = container_registry_credentials + self.notebook_access_keys = notebook_access_keys + self.user_storage_arm_id = None + self.user_storage_key = None + + +class ListWorkspaceQuotas(_serialization.Model): + """The List WorkspaceQuotasByVMFamily operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of Workspace Quotas by VM Family. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ResourceQuota] + :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. + Call ListNext() with this to fetch the next page of Workspace Quota information. + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[ResourceQuota]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + +class LiteralJobInput(JobInput): + """Literal input type. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar value: [Required] Literal value for the input. Required. + :vartype value: str + """ + + _validation = { + "job_input_type": {"required": True}, + "value": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "value": {"key": "value", "type": "str"}, + } + + def __init__(self, *, value: str, description: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword description: Description for the input. + :paramtype description: str + :keyword value: [Required] Literal value for the input. 
Required. + :paramtype value: str + """ + super().__init__(description=description, **kwargs) + self.job_input_type: str = "literal" + self.value = value + + +class ManagedComputeIdentity(MonitorComputeIdentityBase): + """Managed compute identity definition. + + All required parameters must be populated in order to send to Azure. + + :ivar compute_identity_type: [Required] Monitor compute identity type enum. Required. Known + values are: "AmlToken" and "ManagedIdentity". + :vartype compute_identity_type: str or + ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityType + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + """ + + _validation = { + "compute_identity_type": {"required": True}, + } + + _attribute_map = { + "compute_identity_type": {"key": "computeIdentityType", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + } + + def __init__(self, *, identity: Optional["_models.ManagedServiceIdentity"] = None, **kwargs: Any) -> None: + """ + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + """ + super().__init__(**kwargs) + self.compute_identity_type: str = "ManagedIdentity" + self.identity = identity + + +class ManagedIdentity(IdentityConfiguration): + """Managed identity configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known + values are: "Managed", "AMLToken", and "UserIdentity". + :vartype identity_type: str or + ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType + :ivar client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not + set this field. + :vartype client_id: str + :ivar object_id: Specifies a user-assigned identity by object ID. For system-assigned, do not + set this field. + :vartype object_id: str + :ivar resource_id: Specifies a user-assigned identity by ARM resource ID. For system-assigned, + do not set this field. + :vartype resource_id: str + """ + + _validation = { + "identity_type": {"required": True}, + } + + _attribute_map = { + "identity_type": {"key": "identityType", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "object_id": {"key": "objectId", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, + } + + def __init__( + self, + *, + client_id: Optional[str] = None, + object_id: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword client_id: Specifies a user-assigned identity by client ID. For system-assigned, do + not set this field. + :paramtype client_id: str + :keyword object_id: Specifies a user-assigned identity by object ID. For system-assigned, do + not set this field. + :paramtype object_id: str + :keyword resource_id: Specifies a user-assigned identity by ARM resource ID. For + system-assigned, do not set this field. + :paramtype resource_id: str + """ + super().__init__(**kwargs) + self.identity_type: str = "Managed" + self.client_id = client_id + self.object_id = object_id + self.resource_id = resource_id + + +class ManagedIdentityAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """ManagedIdentityAuthTypeWorkspaceConnectionProperties. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. + :vartype metadata: JSON + :ivar target: + :vartype target: str + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity """ + _validation = { + "auth_type": {"required": True}, + } + _attribute_map = { - "advanced_settings": {"key": "advancedSettings", "type": "str"}, - "ams_gradient": {"key": "amsGradient", "type": "bool"}, - "augmentations": {"key": "augmentations", "type": "str"}, - "beta1": {"key": "beta1", "type": "float"}, - "beta2": {"key": "beta2", "type": "float"}, - "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, - "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, - "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, - "distributed": {"key": "distributed", "type": "bool"}, - "early_stopping": {"key": "earlyStopping", "type": "bool"}, - "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, - "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, - "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, - "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, - "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, - "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, - "learning_rate": {"key": "learningRate", "type": "float"}, - "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, - "model_name": {"key": "modelName", "type": "str"}, - "momentum": {"key": "momentum", "type": "float"}, - "nesterov": {"key": "nesterov", "type": "bool"}, - "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, - "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, - "optimizer": {"key": "optimizer", "type": "str"}, - "random_seed": {"key": "randomSeed", "type": "int"}, - "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, - "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, - "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, - "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, - "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, - "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, - "weight_decay": {"key": "weightDecay", "type": "float"}, - "training_crop_size": {"key": "trainingCropSize", "type": "int"}, - "validation_crop_size": {"key": "validationCropSize", "type": "int"}, - "validation_resize_size": {"key": "validationResizeSize", "type": "int"}, - "weighted_loss": {"key": "weightedLoss", "type": "int"}, + "auth_type": {"key": 
"authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionManagedIdentity"}, } - def __init__( # pylint: disable=too-many-locals + def __init__( self, *, - advanced_settings: Optional[str] = None, - ams_gradient: Optional[bool] = None, - augmentations: Optional[str] = None, - beta1: Optional[float] = None, - beta2: Optional[float] = None, - checkpoint_frequency: Optional[int] = None, - checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, - checkpoint_run_id: Optional[str] = None, - distributed: Optional[bool] = None, - early_stopping: Optional[bool] = None, - early_stopping_delay: Optional[int] = None, - early_stopping_patience: Optional[int] = None, - enable_onnx_normalization: Optional[bool] = None, - evaluation_frequency: Optional[int] = None, - gradient_accumulation_step: Optional[int] = None, - layers_to_freeze: Optional[int] = None, - learning_rate: Optional[float] = None, - learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, - model_name: Optional[str] = None, - momentum: Optional[float] = None, - nesterov: Optional[bool] = None, - number_of_epochs: Optional[int] = None, - number_of_workers: Optional[int] = None, - optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, - random_seed: Optional[int] = None, - step_lr_gamma: Optional[float] = None, - step_lr_step_size: Optional[int] = None, - training_batch_size: Optional[int] = None, - validation_batch_size: Optional[int] = None, - warmup_cosine_lr_cycles: Optional[float] = None, - warmup_cosine_lr_warmup_epochs: Optional[int] = None, - weight_decay: Optional[float] = None, - training_crop_size: Optional[int] = None, - validation_crop_size: Optional[int] = None, - validation_resize_size: Optional[int] = None, - weighted_loss: Optional[int] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, + target: Optional[str] = None, + credentials: Optional["_models.WorkspaceConnectionManagedIdentity"] = None, **kwargs: Any ) -> None: """ - :keyword advanced_settings: Settings for advanced scenarios. - :paramtype advanced_settings: str - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: bool - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: float - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: float - :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive - integer. - :paramtype checkpoint_frequency: int - :keyword checkpoint_model: The pretrained checkpoint model for incremental training. - :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :paramtype checkpoint_run_id: str - :keyword distributed: Whether to use distributed training. - :paramtype distributed: bool - :keyword early_stopping: Enable early stopping logic during training. 
- :paramtype early_stopping: bool - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: int - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: int - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: bool - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: int - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: int - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: int - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: float - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Known values are: "None", "WarmupCosine", and "Step". - :paramtype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: float - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: bool - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: int - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: int - :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". - :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: int - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: float - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: int - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: int - :keyword validation_batch_size: Validation batch size. Must be a positive integer. 
- :paramtype validation_batch_size: int - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: float - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: int - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: float - :keyword training_crop_size: Image crop size that is input to the neural network for the - training dataset. Must be a positive integer. - :paramtype training_crop_size: int - :keyword validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :paramtype validation_crop_size: int - :keyword validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :paramtype validation_resize_size: int - :keyword weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. - :paramtype weighted_loss: int - """ - super().__init__( - advanced_settings=advanced_settings, - ams_gradient=ams_gradient, - augmentations=augmentations, - beta1=beta1, - beta2=beta2, - checkpoint_frequency=checkpoint_frequency, - checkpoint_model=checkpoint_model, - checkpoint_run_id=checkpoint_run_id, - distributed=distributed, - early_stopping=early_stopping, - early_stopping_delay=early_stopping_delay, - early_stopping_patience=early_stopping_patience, - enable_onnx_normalization=enable_onnx_normalization, - evaluation_frequency=evaluation_frequency, - gradient_accumulation_step=gradient_accumulation_step, - layers_to_freeze=layers_to_freeze, - learning_rate=learning_rate, - learning_rate_scheduler=learning_rate_scheduler, - model_name=model_name, - momentum=momentum, - nesterov=nesterov, - number_of_epochs=number_of_epochs, - number_of_workers=number_of_workers, - optimizer=optimizer, - random_seed=random_seed, - step_lr_gamma=step_lr_gamma, - step_lr_step_size=step_lr_step_size, - training_batch_size=training_batch_size, - validation_batch_size=validation_batch_size, - warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, - warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, - weight_decay=weight_decay, - **kwargs - ) - self.training_crop_size = training_crop_size - self.validation_crop_size = validation_crop_size - self.validation_resize_size = validation_resize_size - self.weighted_loss = weighted_loss + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. 
+ :paramtype metadata: JSON + :keyword target: + :paramtype target: str + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity + """ + super().__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) + self.auth_type: str = "ManagedIdentity" + self.credentials = credentials -class ImageModelSettingsObjectDetection(ImageModelSettings): # pylint: disable=too-many-instance-attributes - """Settings used for training the model. - For more information on the available settings please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. +class ManagedNetworkProvisionOptions(_serialization.Model): + """Managed Network Provisioning options for managed network of a machine learning workspace. - :ivar advanced_settings: Settings for advanced scenarios. - :vartype advanced_settings: str - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: bool - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: float - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: float - :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. - :vartype checkpoint_frequency: int - :ivar checkpoint_model: The pretrained checkpoint model for incremental training. - :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :vartype checkpoint_run_id: str - :ivar distributed: Whether to use distributed training. - :vartype distributed: bool - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: bool - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: int - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: int - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: bool - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: int - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: int - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. 
- :vartype layers_to_freeze: int - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: float - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Known values are: "None", "WarmupCosine", and "Step". - :vartype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: float - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: bool - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: int - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". - :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: int - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: float - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: int - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: int - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: int - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: float - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: int - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: float - :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must - be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype box_detections_per_image: int - :ivar box_score_threshold: During inference, only return proposals with a classification score - greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :vartype box_score_threshold: float - :ivar image_size: Image size for train and validation. Must be a positive integer. - Note: The training run may get into CUDA OOM if the size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype image_size: int - :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype max_size: int - :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone. 
- Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype min_size: int - :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. - Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. Known values are: "None", - "Small", "Medium", "Large", and "ExtraLarge". - :vartype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize - :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. - Note: training run may get into CUDA OOM if no sufficient GPU memory. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype multi_scale: bool - :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a - float in the range [0, 1]. - :vartype nms_iou_threshold: float - :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not - be - None to enable small object detection logic. A string containing two integers in mxn format. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_grid_size: str - :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be float - in the range [0, 1). - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_overlap_ratio: float - :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging - predictions from tiles and image. - Used in validation/ inference. Must be float in the range [0, 1]. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_predictions_nms_threshold: float - :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be - float in the range [0, 1]. - :vartype validation_iou_threshold: float - :ivar validation_metric_type: Metric computation method to use for validation metrics. Known - values are: "None", "Coco", "Voc", and "CocoVoc". 
- :vartype validation_metric_type: str or - ~azure.mgmt.machinelearningservices.models.ValidationMetricType + :ivar include_spark: + :vartype include_spark: bool """ _attribute_map = { - "advanced_settings": {"key": "advancedSettings", "type": "str"}, - "ams_gradient": {"key": "amsGradient", "type": "bool"}, - "augmentations": {"key": "augmentations", "type": "str"}, - "beta1": {"key": "beta1", "type": "float"}, - "beta2": {"key": "beta2", "type": "float"}, - "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, - "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, - "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, - "distributed": {"key": "distributed", "type": "bool"}, - "early_stopping": {"key": "earlyStopping", "type": "bool"}, - "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, - "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, - "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, - "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, - "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, - "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, - "learning_rate": {"key": "learningRate", "type": "float"}, - "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, - "model_name": {"key": "modelName", "type": "str"}, - "momentum": {"key": "momentum", "type": "float"}, - "nesterov": {"key": "nesterov", "type": "bool"}, - "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, - "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, - "optimizer": {"key": "optimizer", "type": "str"}, - "random_seed": {"key": "randomSeed", "type": "int"}, - "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, - "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, - "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, - "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, - "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, - "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, - "weight_decay": {"key": "weightDecay", "type": "float"}, - "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "int"}, - "box_score_threshold": {"key": "boxScoreThreshold", "type": "float"}, - "image_size": {"key": "imageSize", "type": "int"}, - "max_size": {"key": "maxSize", "type": "int"}, - "min_size": {"key": "minSize", "type": "int"}, - "model_size": {"key": "modelSize", "type": "str"}, - "multi_scale": {"key": "multiScale", "type": "bool"}, - "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "float"}, - "tile_grid_size": {"key": "tileGridSize", "type": "str"}, - "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "float"}, - "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "float"}, - "validation_iou_threshold": {"key": "validationIouThreshold", "type": "float"}, - "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + "include_spark": {"key": "includeSpark", "type": "bool"}, } - def __init__( # pylint: disable=too-many-locals + def __init__(self, *, include_spark: Optional[bool] = None, **kwargs: Any) -> None: + """ + :keyword include_spark: + :paramtype include_spark: bool + """ + super().__init__(**kwargs) + self.include_spark = include_spark + + +class ManagedNetworkProvisionStatus(_serialization.Model): + """Status 
of the Provisioning for the managed network of a machine learning workspace. + + :ivar spark_ready: + :vartype spark_ready: bool + :ivar status: Status for the managed network of a machine learning workspace. Known values are: + "Inactive" and "Active". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.ManagedNetworkStatus + """ + + _attribute_map = { + "spark_ready": {"key": "sparkReady", "type": "bool"}, + "status": {"key": "status", "type": "str"}, + } + + def __init__( self, *, - advanced_settings: Optional[str] = None, - ams_gradient: Optional[bool] = None, - augmentations: Optional[str] = None, - beta1: Optional[float] = None, - beta2: Optional[float] = None, - checkpoint_frequency: Optional[int] = None, - checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, - checkpoint_run_id: Optional[str] = None, - distributed: Optional[bool] = None, - early_stopping: Optional[bool] = None, - early_stopping_delay: Optional[int] = None, - early_stopping_patience: Optional[int] = None, - enable_onnx_normalization: Optional[bool] = None, - evaluation_frequency: Optional[int] = None, - gradient_accumulation_step: Optional[int] = None, - layers_to_freeze: Optional[int] = None, - learning_rate: Optional[float] = None, - learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, - model_name: Optional[str] = None, - momentum: Optional[float] = None, - nesterov: Optional[bool] = None, - number_of_epochs: Optional[int] = None, - number_of_workers: Optional[int] = None, - optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, - random_seed: Optional[int] = None, - step_lr_gamma: Optional[float] = None, - step_lr_step_size: Optional[int] = None, - training_batch_size: Optional[int] = None, - validation_batch_size: Optional[int] = None, - warmup_cosine_lr_cycles: Optional[float] = None, - warmup_cosine_lr_warmup_epochs: Optional[int] = None, - weight_decay: Optional[float] = None, - box_detections_per_image: Optional[int] = None, - box_score_threshold: Optional[float] = None, - image_size: Optional[int] = None, - max_size: Optional[int] = None, - min_size: Optional[int] = None, - model_size: Optional[Union[str, "_models.ModelSize"]] = None, - multi_scale: Optional[bool] = None, - nms_iou_threshold: Optional[float] = None, - tile_grid_size: Optional[str] = None, - tile_overlap_ratio: Optional[float] = None, - tile_predictions_nms_threshold: Optional[float] = None, - validation_iou_threshold: Optional[float] = None, - validation_metric_type: Optional[Union[str, "_models.ValidationMetricType"]] = None, + spark_ready: Optional[bool] = None, + status: Optional[Union[str, "_models.ManagedNetworkStatus"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword spark_ready: + :paramtype spark_ready: bool + :keyword status: Status for the managed network of a machine learning workspace. Known values + are: "Inactive" and "Active". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ManagedNetworkStatus + """ + super().__init__(**kwargs) + self.spark_ready = spark_ready + self.status = status + + +class ManagedNetworkSettings(_serialization.Model): + """Managed Network settings for a machine learning workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar isolation_mode: Isolation mode for the managed network of a machine learning workspace. + Known values are: "Disabled", "AllowInternetOutbound", and "AllowOnlyApprovedOutbound". 
+ :vartype isolation_mode: str or ~azure.mgmt.machinelearningservices.models.IsolationMode + :ivar network_id: + :vartype network_id: str + :ivar outbound_rules: Dictionary of :code:``. + :vartype outbound_rules: dict[str, ~azure.mgmt.machinelearningservices.models.OutboundRule] + :ivar status: Status of the Provisioning for the managed network of a machine learning + workspace. + :vartype status: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus + """ + + _validation = { + "network_id": {"readonly": True}, + } + + _attribute_map = { + "isolation_mode": {"key": "isolationMode", "type": "str"}, + "network_id": {"key": "networkId", "type": "str"}, + "outbound_rules": {"key": "outboundRules", "type": "{OutboundRule}"}, + "status": {"key": "status", "type": "ManagedNetworkProvisionStatus"}, + } + + def __init__( + self, + *, + isolation_mode: Optional[Union[str, "_models.IsolationMode"]] = None, + outbound_rules: Optional[Dict[str, "_models.OutboundRule"]] = None, + status: Optional["_models.ManagedNetworkProvisionStatus"] = None, **kwargs: Any ) -> None: """ - :keyword advanced_settings: Settings for advanced scenarios. - :paramtype advanced_settings: str - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: bool - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: float - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: float - :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive - integer. - :paramtype checkpoint_frequency: int - :keyword checkpoint_model: The pretrained checkpoint model for incremental training. - :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :paramtype checkpoint_run_id: str - :keyword distributed: Whether to use distributed training. - :paramtype distributed: bool - :keyword early_stopping: Enable early stopping logic during training. - :paramtype early_stopping: bool - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: int - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: int - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: bool - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: int - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: int - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. 
- For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: int - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: float - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Known values are: "None", "WarmupCosine", and "Step". - :paramtype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: float - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: bool - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: int - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: int - :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". - :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: int - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: float - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: int - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: int - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: int - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: float - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: int - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: float - :keyword box_detections_per_image: Maximum number of detections per image, for all classes. - Must be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype box_detections_per_image: int - :keyword box_score_threshold: During inference, only return proposals with a classification - score greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :paramtype box_score_threshold: float - :keyword image_size: Image size for train and validation. Must be a positive integer. - Note: The training run may get into CUDA OOM if the size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. 
- :paramtype image_size: int - :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype max_size: int - :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype min_size: int - :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. - Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. Known values are: "None", - "Small", "Medium", "Large", and "ExtraLarge". - :paramtype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize - :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. - Note: training run may get into CUDA OOM if no sufficient GPU memory. - Note: This settings is only supported for the 'yolov5' algorithm. - :paramtype multi_scale: bool - :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be - a float in the range [0, 1]. - :paramtype nms_iou_threshold: float - :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must - not be - None to enable small object detection logic. A string containing two integers in mxn format. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_grid_size: str - :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be - float in the range [0, 1). - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_overlap_ratio: float - :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging - predictions from tiles and image. - Used in validation/ inference. Must be float in the range [0, 1]. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_predictions_nms_threshold: float - :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must - be float in the range [0, 1]. - :paramtype validation_iou_threshold: float - :keyword validation_metric_type: Metric computation method to use for validation metrics. Known - values are: "None", "Coco", "Voc", and "CocoVoc". - :paramtype validation_metric_type: str or - ~azure.mgmt.machinelearningservices.models.ValidationMetricType + :keyword isolation_mode: Isolation mode for the managed network of a machine learning + workspace. Known values are: "Disabled", "AllowInternetOutbound", and + "AllowOnlyApprovedOutbound". + :paramtype isolation_mode: str or ~azure.mgmt.machinelearningservices.models.IsolationMode + :keyword outbound_rules: Dictionary of :code:``. + :paramtype outbound_rules: dict[str, ~azure.mgmt.machinelearningservices.models.OutboundRule] + :keyword status: Status of the Provisioning for the managed network of a machine learning + workspace. 
+ :paramtype status: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus """ - super().__init__( - advanced_settings=advanced_settings, - ams_gradient=ams_gradient, - augmentations=augmentations, - beta1=beta1, - beta2=beta2, - checkpoint_frequency=checkpoint_frequency, - checkpoint_model=checkpoint_model, - checkpoint_run_id=checkpoint_run_id, - distributed=distributed, - early_stopping=early_stopping, - early_stopping_delay=early_stopping_delay, - early_stopping_patience=early_stopping_patience, - enable_onnx_normalization=enable_onnx_normalization, - evaluation_frequency=evaluation_frequency, - gradient_accumulation_step=gradient_accumulation_step, - layers_to_freeze=layers_to_freeze, - learning_rate=learning_rate, - learning_rate_scheduler=learning_rate_scheduler, - model_name=model_name, - momentum=momentum, - nesterov=nesterov, - number_of_epochs=number_of_epochs, - number_of_workers=number_of_workers, - optimizer=optimizer, - random_seed=random_seed, - step_lr_gamma=step_lr_gamma, - step_lr_step_size=step_lr_step_size, - training_batch_size=training_batch_size, - validation_batch_size=validation_batch_size, - warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, - warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, - weight_decay=weight_decay, - **kwargs - ) - self.box_detections_per_image = box_detections_per_image - self.box_score_threshold = box_score_threshold - self.image_size = image_size - self.max_size = max_size - self.min_size = min_size - self.model_size = model_size - self.multi_scale = multi_scale - self.nms_iou_threshold = nms_iou_threshold - self.tile_grid_size = tile_grid_size - self.tile_overlap_ratio = tile_overlap_ratio - self.tile_predictions_nms_threshold = tile_predictions_nms_threshold - self.validation_iou_threshold = validation_iou_threshold - self.validation_metric_type = validation_metric_type + super().__init__(**kwargs) + self.isolation_mode = isolation_mode + self.network_id = None + self.outbound_rules = outbound_rules + self.status = status -class ImageObjectDetection(ImageObjectDetectionBase, AutoMLVertical): # pylint: disable=too-many-instance-attributes - """Image Object Detection. Object detection is used to identify objects in an image and locate - each object with a - bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. +class ManagedOnlineDeployment(OnlineDeploymentProperties): # pylint: disable=too-many-instance-attributes + """Properties specific to a ManagedOnlineDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", - "Warning", "Error", and "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: - "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", and "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: [Required] Training data input. 
Required. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :ivar primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar app_insights_enabled: If true, enables Application Insights logging. + :vartype app_insights_enabled: bool + :ivar data_collector: The mdc configuration, we disable mdc when it's null. + :vartype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector + :ivar egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". + :vartype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known + values are: "Managed", "Kubernetes", and "AzureMLCompute". + :vartype endpoint_compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :ivar instance_type: Compute instance type. + :vartype instance_type: str + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar model: The URI path to the model. + :vartype model: str + :ivar model_mount_path: The path to mount the model in custom container. + :vartype model_mount_path: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". 
+ :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar request_settings: Request settings for the deployment. + :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :ivar scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings """ _validation = { - "task_type": {"required": True}, - "training_data": {"required": True}, - "limit_settings": {"required": True}, + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - "log_verbosity": {"key": "logVerbosity", "type": "str"}, - "target_column_name": {"key": "targetColumnName", "type": "str"}, - "task_type": {"key": "taskType", "type": "str"}, - "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, - "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, - "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, - "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, - "validation_data_size": {"key": "validationDataSize", "type": "float"}, - "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, - "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, - "primary_metric": {"key": "primaryMetric", "type": "str"}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "data_collector": {"key": "dataCollector", "type": "DataCollector"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, } def __init__( self, *, - training_data: "_models.MLTableJobInput", - limit_settings: "_models.ImageLimitSettings", - log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, - target_column_name: Optional[str] = None, - sweep_settings: Optional["_models.ImageSweepSettings"] = None, - validation_data: Optional["_models.MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, - 
search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, - primary_metric: Optional[Union[str, "_models.ObjectDetectionPrimaryMetrics"]] = None, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + app_insights_enabled: bool = False, + data_collector: Optional["_models.DataCollector"] = None, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, + instance_type: Optional[str] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + model: Optional[str] = None, + model_mount_path: Optional[str] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, **kwargs: Any ) -> None: """ - :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", - "Warning", "Error", and "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: [Required] Training data input. Required. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :keyword primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. 
Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword app_insights_enabled: If true, enables Application Insights logging. + :paramtype app_insights_enabled: bool + :keyword data_collector: The mdc configuration, we disable mdc when it's null. + :paramtype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector + :keyword egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and + "Disabled". + :paramtype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :keyword instance_type: Compute instance type. + :paramtype instance_type: str + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword model: The URI path to the model. + :paramtype model: str + :keyword model_mount_path: The path to mount the model in custom container. + :paramtype model_mount_path: str + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword request_settings: Request settings for the deployment. + :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :keyword scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings """ super().__init__( - limit_settings=limit_settings, - sweep_settings=sweep_settings, - validation_data=validation_data, - validation_data_size=validation_data_size, - model_settings=model_settings, - search_space=search_space, - log_verbosity=log_verbosity, - target_column_name=target_column_name, - training_data=training_data, + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + app_insights_enabled=app_insights_enabled, + data_collector=data_collector, + egress_public_network_access=egress_public_network_access, + instance_type=instance_type, + liveness_probe=liveness_probe, + model=model, + model_mount_path=model_mount_path, + readiness_probe=readiness_probe, + request_settings=request_settings, + scale_settings=scale_settings, **kwargs ) - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.task_type: str = "ImageObjectDetection" - self.training_data = training_data - self.primary_metric = primary_metric - self.limit_settings = limit_settings - self.sweep_settings = sweep_settings - self.validation_data = validation_data - self.validation_data_size = validation_data_size - self.model_settings = model_settings - self.search_space = search_space + self.endpoint_compute_type: str = "Managed" -class ImageSweepSettings(_serialization.Model): - """Model sweeping and hyperparameter sweeping related settings. +class ManagedServiceIdentity(_serialization.Model): + """Managed service identity (system assigned and/or user assigned identities). 
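The ManagedOnlineDeployment model added above pins its endpoint_compute_type discriminator to "Managed" in the constructor, so callers only pass deployment-specific fields. A minimal usage sketch, assuming placeholder values for the SKU, model URI, and request settings (none of these come from this change):

```python
from azure.mgmt.machinelearningservices import models

# Illustrative only: SKU name and model URI are placeholders.
deployment = models.ManagedOnlineDeployment(
    description="sample managed deployment",
    instance_type="Standard_DS3_v2",
    model="azureml://registries/example/models/example-model/versions/1",
    scale_settings=models.DefaultScaleSettings(),  # docstring default for managed deployments
    request_settings=models.OnlineRequestSettings(max_concurrent_requests_per_instance=1),
)

# The discriminator is fixed by the constructor rather than passed by the caller.
assert deployment.endpoint_compute_type == "Managed"
```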
+ + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar early_termination: Type of early termination policy. - :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :ivar sampling_algorithm: [Required] Type of the hyperparameter sampling algorithms. Required. - Known values are: "Grid", "Random", and "Bayesian". - :vartype sampling_algorithm: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + :ivar principal_id: The service principal ID of the system assigned identity. This property + will only be provided for a system assigned identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be + provided for a system assigned identity. + :vartype tenant_id: str + :ivar type: Type of managed service identity (where both SystemAssigned and UserAssigned types + are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :ivar user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. + :vartype user_assigned_identities: dict[str, + ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] """ _validation = { - "sampling_algorithm": {"required": True}, + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"required": True}, } _attribute_map = { - "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, - "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"}, } def __init__( self, *, - sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], - early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + type: Union[str, "_models.ManagedServiceIdentityType"], + user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, **kwargs: Any ) -> None: """ - :keyword early_termination: Type of early termination policy. - :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :keyword sampling_algorithm: [Required] Type of the hyperparameter sampling algorithms. - Required. Known values are: "Grid", "Random", and "Bayesian". - :paramtype sampling_algorithm: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + :keyword type: Type of managed service identity (where both SystemAssigned and UserAssigned + types are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :keyword user_assigned_identities: The set of user assigned identities associated with the + resource. 
The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. + :paramtype user_assigned_identities: dict[str, + ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] """ super().__init__(**kwargs) - self.early_termination = early_termination - self.sampling_algorithm = sampling_algorithm + self.principal_id = None + self.tenant_id = None + self.type = type + self.user_assigned_identities = user_assigned_identities -class InferenceContainerProperties(_serialization.Model): - """InferenceContainerProperties. +class MaterializationComputeResource(_serialization.Model): + """Dto object representing compute resource. - :ivar liveness_route: The route to check the liveness of the inference server container. - :vartype liveness_route: ~azure.mgmt.machinelearningservices.models.Route - :ivar readiness_route: The route to check the readiness of the inference server container. - :vartype readiness_route: ~azure.mgmt.machinelearningservices.models.Route - :ivar scoring_route: The port to send the scoring requests to, within the inference server - container. - :vartype scoring_route: ~azure.mgmt.machinelearningservices.models.Route + :ivar instance_type: Specifies the instance type. + :vartype instance_type: str """ _attribute_map = { - "liveness_route": {"key": "livenessRoute", "type": "Route"}, - "readiness_route": {"key": "readinessRoute", "type": "Route"}, - "scoring_route": {"key": "scoringRoute", "type": "Route"}, + "instance_type": {"key": "instanceType", "type": "str"}, } - def __init__( - self, - *, - liveness_route: Optional["_models.Route"] = None, - readiness_route: Optional["_models.Route"] = None, - scoring_route: Optional["_models.Route"] = None, - **kwargs: Any - ) -> None: + def __init__(self, *, instance_type: Optional[str] = None, **kwargs: Any) -> None: """ - :keyword liveness_route: The route to check the liveness of the inference server container. - :paramtype liveness_route: ~azure.mgmt.machinelearningservices.models.Route - :keyword readiness_route: The route to check the readiness of the inference server container. - :paramtype readiness_route: ~azure.mgmt.machinelearningservices.models.Route - :keyword scoring_route: The port to send the scoring requests to, within the inference server - container. - :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route + :keyword instance_type: Specifies the instance type. + :paramtype instance_type: str """ super().__init__(**kwargs) - self.liveness_route = liveness_route - self.readiness_route = readiness_route - self.scoring_route = scoring_route + self.instance_type = instance_type -class InstanceTypeSchema(_serialization.Model): - """Instance type schema. +class MaterializationSettings(_serialization.Model): + """MaterializationSettings. - :ivar node_selector: Node Selector. - :vartype node_selector: dict[str, str] - :ivar resources: Resource requests/limits for this instance type. - :vartype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources + :ivar notification: Specifies the notification details. + :vartype notification: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :ivar resource: Specifies the compute resource settings. 
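ManagedServiceIdentity follows the common ARM identity shape: principal_id and tenant_id are read-only and server-populated, while type and user_assigned_identities are caller-supplied. A sketch, assuming a placeholder user-assigned identity resource ID:

```python
from azure.mgmt.machinelearningservices import models

# System-assigned only; principal_id/tenant_id stay None until the service fills them in.
system_identity = models.ManagedServiceIdentity(type="SystemAssigned")

# Combined identity; the dictionary key is a placeholder ARM resource ID, and the
# value can be an empty UserAssignedIdentity object in requests.
combined_identity = models.ManagedServiceIdentity(
    type="SystemAssigned,UserAssigned",
    user_assigned_identities={
        "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/"
        "Microsoft.ManagedIdentity/userAssignedIdentities/<identity-name>": models.UserAssignedIdentity(),
    },
)
```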
+ :vartype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource + :ivar schedule: Specifies the schedule details. + :vartype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceTrigger + :ivar spark_configuration: Specifies the spark compute settings. + :vartype spark_configuration: dict[str, str] + :ivar store_type: Specifies the stores to which materialization should happen. Known values + are: "None", "Online", "Offline", and "OnlineAndOffline". + :vartype store_type: str or ~azure.mgmt.machinelearningservices.models.MaterializationStoreType """ _attribute_map = { - "node_selector": {"key": "nodeSelector", "type": "{str}"}, - "resources": {"key": "resources", "type": "InstanceTypeSchemaResources"}, + "notification": {"key": "notification", "type": "NotificationSetting"}, + "resource": {"key": "resource", "type": "MaterializationComputeResource"}, + "schedule": {"key": "schedule", "type": "RecurrenceTrigger"}, + "spark_configuration": {"key": "sparkConfiguration", "type": "{str}"}, + "store_type": {"key": "storeType", "type": "str"}, } def __init__( self, *, - node_selector: Optional[Dict[str, str]] = None, - resources: Optional["_models.InstanceTypeSchemaResources"] = None, + notification: Optional["_models.NotificationSetting"] = None, + resource: Optional["_models.MaterializationComputeResource"] = None, + schedule: Optional["_models.RecurrenceTrigger"] = None, + spark_configuration: Optional[Dict[str, str]] = None, + store_type: Optional[Union[str, "_models.MaterializationStoreType"]] = None, **kwargs: Any ) -> None: """ - :keyword node_selector: Node Selector. - :paramtype node_selector: dict[str, str] - :keyword resources: Resource requests/limits for this instance type. - :paramtype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources - """ - super().__init__(**kwargs) - self.node_selector = node_selector - self.resources = resources + :keyword notification: Specifies the notification details. + :paramtype notification: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :keyword resource: Specifies the compute resource settings. + :paramtype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource + :keyword schedule: Specifies the schedule details. + :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceTrigger + :keyword spark_configuration: Specifies the spark compute settings. + :paramtype spark_configuration: dict[str, str] + :keyword store_type: Specifies the stores to which materialization should happen. Known values + are: "None", "Online", "Offline", and "OnlineAndOffline". + :paramtype store_type: str or + ~azure.mgmt.machinelearningservices.models.MaterializationStoreType + """ + super().__init__(**kwargs) + self.notification = notification + self.resource = resource + self.schedule = schedule + self.spark_configuration = spark_configuration + self.store_type = store_type -class InstanceTypeSchemaResources(_serialization.Model): - """Resource requests/limits for this instance type. +class MedianStoppingPolicy(EarlyTerminationPolicy): + """Defines an early termination policy based on running averages of the primary metric of all + runs. - :ivar requests: Resource requests for this instance type. - :vartype requests: dict[str, str] - :ivar limits: Resource limits for this instance type. - :vartype limits: dict[str, str] + All required parameters must be populated in order to send to Azure. 
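MaterializationSettings groups the notification, compute, schedule, Spark, and store-type options for feature-set materialization. A hedged sketch, with an assumed instance type and recurrence (not defaults from this change):

```python
from azure.mgmt.machinelearningservices import models

# Placeholder compute size, daily recurrence, and Spark setting.
materialization = models.MaterializationSettings(
    store_type="OnlineAndOffline",
    resource=models.MaterializationComputeResource(instance_type="standard_e4s_v3"),
    schedule=models.RecurrenceTrigger(frequency="Day", interval=1),
    spark_configuration={"spark.driver.memory": "2g"},
)
```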
+ + :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. + :vartype delay_evaluation: int + :ivar evaluation_interval: Interval (number of runs) between policy evaluations. + :vartype evaluation_interval: int + :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: + "Bandit", "MedianStopping", and "TruncationSelection". + :vartype policy_type: str or + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType """ + _validation = { + "policy_type": {"required": True}, + } + _attribute_map = { - "requests": {"key": "requests", "type": "{str}"}, - "limits": {"key": "limits", "type": "{str}"}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, } - def __init__( - self, *, requests: Optional[Dict[str, str]] = None, limits: Optional[Dict[str, str]] = None, **kwargs: Any - ) -> None: + def __init__(self, *, delay_evaluation: int = 0, evaluation_interval: int = 0, **kwargs: Any) -> None: """ - :keyword requests: Resource requests for this instance type. - :paramtype requests: dict[str, str] - :keyword limits: Resource limits for this instance type. - :paramtype limits: dict[str, str] + :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. + :paramtype delay_evaluation: int + :keyword evaluation_interval: Interval (number of runs) between policy evaluations. + :paramtype evaluation_interval: int """ - super().__init__(**kwargs) - self.requests = requests - self.limits = limits + super().__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type: str = "MedianStopping" -class JobBase(Resource): - """Azure Resource Manager resource envelope. +class MLAssistConfiguration(_serialization.Model): + """Labeling MLAssist configuration definition. - Variables are only populated by the server, and will be ignored when sending a request. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MLAssistConfigurationDisabled, MLAssistConfigurationEnabled All required parameters must be populated in order to send to Azure. - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: [Required] Additional attributes of the entity. Required. - :vartype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + :ivar ml_assist: [Required] Indicates whether MLAssist feature is enabled. Required. Known + values are: "Enabled" and "Disabled". 
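MedianStoppingPolicy is one of the polymorphic EarlyTerminationPolicy subtypes; its policy_type discriminator is set in __init__. For example (interval values are arbitrary):

```python
from azure.mgmt.machinelearningservices import models

# Delay the first evaluation by five intervals, then evaluate every run.
policy = models.MedianStoppingPolicy(delay_evaluation=5, evaluation_interval=1)
assert policy.policy_type == "MedianStopping"  # set by the constructor
```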
+ :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType """ _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - "properties": {"required": True}, + "ml_assist": {"required": True}, } _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - "properties": {"key": "properties", "type": "JobBaseProperties"}, + "ml_assist": {"key": "mlAssist", "type": "str"}, } - def __init__(self, *, properties: "_models.JobBaseProperties", **kwargs: Any) -> None: - """ - :keyword properties: [Required] Additional attributes of the entity. Required. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties - """ + _subtype_map = { + "ml_assist": {"Disabled": "MLAssistConfigurationDisabled", "Enabled": "MLAssistConfigurationEnabled"} + } + + def __init__(self, **kwargs: Any) -> None: + """ """ super().__init__(**kwargs) - self.properties = properties + self.ml_assist: Optional[str] = None -class JobBaseResourceArmPaginatedResult(_serialization.Model): - """A paginated list of JobBase entities. +class MLAssistConfigurationDisabled(MLAssistConfiguration): + """Labeling MLAssist configuration definition when MLAssist is disabled. - :ivar next_link: The link to the next page of JobBase objects. If null, there are no additional - pages. - :vartype next_link: str - :ivar value: An array of objects of type JobBase. - :vartype value: list[~azure.mgmt.machinelearningservices.models.JobBase] + All required parameters must be populated in order to send to Azure. + + :ivar ml_assist: [Required] Indicates whether MLAssist feature is enabled. Required. Known + values are: "Enabled" and "Disabled". + :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType """ + _validation = { + "ml_assist": {"required": True}, + } + _attribute_map = { - "next_link": {"key": "nextLink", "type": "str"}, - "value": {"key": "value", "type": "[JobBase]"}, + "ml_assist": {"key": "mlAssist", "type": "str"}, } - def __init__( - self, *, next_link: Optional[str] = None, value: Optional[List["_models.JobBase"]] = None, **kwargs: Any - ) -> None: + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.ml_assist: str = "Disabled" + + +class MLAssistConfigurationEnabled(MLAssistConfiguration): + """Labeling MLAssist configuration definition when MLAssist is enabled. + + All required parameters must be populated in order to send to Azure. + + :ivar ml_assist: [Required] Indicates whether MLAssist feature is enabled. Required. Known + values are: "Enabled" and "Disabled". + :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType + :ivar inferencing_compute_binding: [Required] AML compute binding used in inferencing. + Required. + :vartype inferencing_compute_binding: str + :ivar training_compute_binding: [Required] AML compute binding used in training. Required. 
+ :vartype training_compute_binding: str + """ + + _validation = { + "ml_assist": {"required": True}, + "inferencing_compute_binding": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "training_compute_binding": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "ml_assist": {"key": "mlAssist", "type": "str"}, + "inferencing_compute_binding": {"key": "inferencingComputeBinding", "type": "str"}, + "training_compute_binding": {"key": "trainingComputeBinding", "type": "str"}, + } + + def __init__(self, *, inferencing_compute_binding: str, training_compute_binding: str, **kwargs: Any) -> None: """ - :keyword next_link: The link to the next page of JobBase objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type JobBase. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.JobBase] + :keyword inferencing_compute_binding: [Required] AML compute binding used in inferencing. + Required. + :paramtype inferencing_compute_binding: str + :keyword training_compute_binding: [Required] AML compute binding used in training. Required. + :paramtype training_compute_binding: str """ super().__init__(**kwargs) - self.next_link = next_link - self.value = value + self.ml_assist: str = "Enabled" + self.inferencing_compute_binding = inferencing_compute_binding + self.training_compute_binding = training_compute_binding -class JobResourceConfiguration(ResourceConfiguration): - """JobResourceConfiguration. +class MLFlowModelJobInput(AssetJobInput, JobInput): + """MLFlowModelJobInput. - :ivar instance_count: Optional number of instances or nodes used by the compute target. - :vartype instance_count: int - :ivar instance_type: Optional type of VM used as supported by the compute target. - :vartype instance_type: str - :ivar properties: Additional properties bag. - :vartype properties: dict[str, JSON] - :ivar docker_args: Extra arguments to pass to the Docker run command. This would override any - parameters that have already been set by the system, or in this section. This parameter is only - supported for Azure ML compute types. - :vartype docker_args: str - :ivar shm_size: Size of the docker container's shared memory block. This should be in the - format of (number)(unit) where number as to be greater than 0 and the unit can be one of - b(bytes), k(kilobytes), m(megabytes), or g(gigabytes). - :vartype shm_size: str + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. 
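The MLAssist configuration is modeled as a discriminated pair: MLAssistConfigurationDisabled takes no arguments, while MLAssistConfigurationEnabled requires both compute bindings. A sketch with placeholder binding strings (the exact binding format is not shown in this change):

```python
from azure.mgmt.machinelearningservices import models

# Placeholder compute bindings for a labeling job's MLAssist feature.
enabled = models.MLAssistConfigurationEnabled(
    inferencing_compute_binding="cpu-cluster",
    training_compute_binding="gpu-cluster",
)
assert enabled.ml_assist == "Enabled"

# Disabling MLAssist needs no additional arguments.
disabled = models.MLAssistConfigurationDisabled()
```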
+ :vartype uri: str """ _validation = { - "shm_size": {"pattern": r"\d+[bBkKmMgG]"}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - "instance_count": {"key": "instanceCount", "type": "int"}, - "instance_type": {"key": "instanceType", "type": "str"}, - "properties": {"key": "properties", "type": "{object}"}, - "docker_args": {"key": "dockerArgs", "type": "str"}, - "shm_size": {"key": "shmSize", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, - instance_count: int = 1, - instance_type: Optional[str] = None, - properties: Optional[Dict[str, JSON]] = None, - docker_args: Optional[str] = None, - shm_size: str = "2g", + uri: str, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, **kwargs: Any ) -> None: """ - :keyword instance_count: Optional number of instances or nodes used by the compute target. - :paramtype instance_count: int - :keyword instance_type: Optional type of VM used as supported by the compute target. - :paramtype instance_type: str - :keyword properties: Additional properties bag. - :paramtype properties: dict[str, JSON] - :keyword docker_args: Extra arguments to pass to the Docker run command. This would override - any parameters that have already been set by the system, or in this section. This parameter is - only supported for Azure ML compute types. - :paramtype docker_args: str - :keyword shm_size: Size of the docker container's shared memory block. This should be in the - format of (number)(unit) where number as to be greater than 0 and the unit can be one of - b(bytes), k(kilobytes), m(megabytes), or g(gigabytes). - :paramtype shm_size: str + :keyword description: Description for the input. + :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str """ - super().__init__(instance_count=instance_count, instance_type=instance_type, properties=properties, **kwargs) - self.docker_args = docker_args - self.shm_size = shm_size + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type: str = "mlflow_model" + self.mode = mode + self.uri = uri -class JobScheduleAction(ScheduleActionBase): - """JobScheduleAction. +class MLFlowModelJobOutput(AssetJobOutput, JobOutput): + """MLFlowModelJobOutput. All required parameters must be populated in order to send to Azure. - :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values - are: "CreateJob" and "InvokeBatchEndpoint". - :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - :ivar job_definition: [Required] Defines Schedule action definition details. Required. - :vartype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. 
Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + :ivar asset_name: Output Asset Name. + :vartype asset_name: str + :ivar asset_version: Output Asset Version. + :vartype asset_version: str + :ivar auto_delete_setting: Auto delete setting of output data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str """ _validation = { - "action_type": {"required": True}, - "job_definition": {"required": True}, + "job_output_type": {"required": True}, } _attribute_map = { - "action_type": {"key": "actionType", "type": "str"}, - "job_definition": {"key": "jobDefinition", "type": "JobBaseProperties"}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } - def __init__(self, *, job_definition: "_models.JobBaseProperties", **kwargs: Any) -> None: + def __init__( + self, + *, + description: Optional[str] = None, + asset_name: Optional[str] = None, + asset_version: Optional[str] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + **kwargs: Any + ) -> None: """ - :keyword job_definition: [Required] Defines Schedule action definition details. Required. - :paramtype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + :keyword description: Description for the output. + :paramtype description: str + :keyword asset_name: Output Asset Name. + :paramtype asset_name: str + :keyword asset_version: Output Asset Version. + :paramtype asset_version: str + :keyword auto_delete_setting: Auto delete setting of output data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str """ - super().__init__(**kwargs) - self.action_type: str = "CreateJob" - self.job_definition = job_definition + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) + self.description = description + self.job_output_type: str = "mlflow_model" + self.asset_name = asset_name + self.asset_version = asset_version + self.auto_delete_setting = auto_delete_setting + self.mode = mode + self.uri = uri -class JobService(_serialization.Model): - """Job endpoint definition. +class MLTableData(DataVersionBaseProperties): # pylint: disable=too-many-instance-attributes + """MLTable data definition. - Variables are only populated by the server, and will be ignored when sending a request. 
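The MLflow-model job input and output classes both derive from the shared asset I/O bases and pin their type discriminators to "mlflow_model". A sketch with placeholder URIs:

```python
from azure.mgmt.machinelearningservices import models

# uri is required on inputs; outputs can omit it and let the service register the asset.
model_input = models.MLFlowModelJobInput(
    uri="azureml://registries/example/models/example-model/versions/1",
    mode="ReadOnlyMount",
)
model_output = models.MLFlowModelJobOutput(mode="ReadWriteMount")

assert model_input.job_input_type == "mlflow_model"
assert model_output.job_output_type == "mlflow_model"
```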
+ All required parameters must be populated in order to send to Azure. - :ivar endpoint: Url for endpoint. - :vartype endpoint: str - :ivar error_message: Any error in the service. - :vartype error_message: str - :ivar job_service_type: Endpoint type. - :vartype job_service_type: str - :ivar nodes: Nodes that user would like to start the service on. - If Nodes is not set or set to null, the service will only be started on leader node. - :vartype nodes: ~azure.mgmt.machinelearningservices.models.Nodes - :ivar port: Port for endpoint. - :vartype port: int - :ivar properties: Additional properties to set on the endpoint. + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar status: Status of endpoint. - :vartype status: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :vartype is_archived: bool + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. + :vartype data_uri: str + :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :ivar stage: Stage in the data lifecycle assigned to this data asset. + :vartype stage: str + :ivar referenced_uris: Uris referenced in the MLTable definition (required for lineage). 
+ :vartype referenced_uris: list[str] """ _validation = { - "error_message": {"readonly": True}, - "status": {"readonly": True}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - "endpoint": {"key": "endpoint", "type": "str"}, - "error_message": {"key": "errorMessage", "type": "str"}, - "job_service_type": {"key": "jobServiceType", "type": "str"}, - "nodes": {"key": "nodes", "type": "Nodes"}, - "port": {"key": "port", "type": "int"}, + "description": {"key": "description", "type": "str"}, "properties": {"key": "properties", "type": "{str}"}, - "status": {"key": "status", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, + "referenced_uris": {"key": "referencedUris", "type": "[str]"}, } def __init__( self, *, - endpoint: Optional[str] = None, - job_service_type: Optional[str] = None, - nodes: Optional["_models.Nodes"] = None, - port: Optional[int] = None, + data_uri: str, + description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + stage: Optional[str] = None, + referenced_uris: Optional[List[str]] = None, **kwargs: Any ) -> None: """ - :keyword endpoint: Url for endpoint. - :paramtype endpoint: str - :keyword job_service_type: Endpoint type. - :paramtype job_service_type: str - :keyword nodes: Nodes that user would like to start the service on. - If Nodes is not set or set to null, the service will only be started on leader node. - :paramtype nodes: ~azure.mgmt.machinelearningservices.models.Nodes - :keyword port: Port for endpoint. - :paramtype port: int - :keyword properties: Additional properties to set on the endpoint. + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :paramtype is_archived: bool + :keyword data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. + :paramtype data_uri: str + :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. 
+ :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword stage: Stage in the data lifecycle assigned to this data asset. + :paramtype stage: str + :keyword referenced_uris: Uris referenced in the MLTable definition (required for lineage). + :paramtype referenced_uris: list[str] """ - super().__init__(**kwargs) - self.endpoint = endpoint - self.error_message = None - self.job_service_type = job_service_type - self.nodes = nodes - self.port = port - self.properties = properties - self.status = None + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + data_uri=data_uri, + intellectual_property=intellectual_property, + stage=stage, + **kwargs + ) + self.data_type: str = "mltable" + self.referenced_uris = referenced_uris -class KubernetesSchema(_serialization.Model): - """Kubernetes Compute Schema. +class MLTableJobInput(AssetJobInput, JobInput): + """MLTableJobInput. - :ivar properties: Properties of Kubernetes. - :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str """ + _validation = { + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + _attribute_map = { - "properties": {"key": "properties", "type": "KubernetesProperties"}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } - def __init__(self, *, properties: Optional["_models.KubernetesProperties"] = None, **kwargs: Any) -> None: + def __init__( + self, + *, + uri: str, + description: Optional[str] = None, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs: Any + ) -> None: """ - :keyword properties: Properties of Kubernetes. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + :keyword description: Description for the input. + :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. 
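MLTableData fixes data_type to "mltable" and adds referenced_uris for lineage on top of the shared data-version properties. A sketch that reuses the example URI from the docstring (the referenced URI is a placeholder):

```python
from azure.mgmt.machinelearningservices import models

mltable = models.MLTableData(
    data_uri="https://go.microsoft.com/fwlink/?linkid=2202330",  # example URI from the docstring
    referenced_uris=["wasbs://container@account.blob.core.windows.net/path/data.csv"],  # placeholder
    tags={"purpose": "sample"},
)
assert mltable.data_type == "mltable"
```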
+ :paramtype uri: str """ - super().__init__(**kwargs) - self.properties = properties - + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type: str = "mltable" + self.mode = mode + self.uri = uri -class Kubernetes(Compute, KubernetesSchema): # pylint: disable=too-many-instance-attributes - """A Machine Learning compute based on Kubernetes Compute. - Variables are only populated by the server, and will be ignored when sending a request. +class MLTableJobOutput(AssetJobOutput, JobOutput): + """MLTableJobOutput. All required parameters must be populated in order to send to Azure. - :ivar properties: Properties of Kubernetes. - :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties - :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", - "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", - "DataLakeAnalytics", and "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. + :ivar description: Description for the output. :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + :ivar asset_name: Output Asset Name. + :vartype asset_name: str + :ivar asset_version: Output Asset Version. + :vartype asset_version: str + :ivar auto_delete_setting: Auto delete setting of output data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. 
+ :vartype uri: str """ _validation = { - "compute_type": {"required": True}, - "provisioning_state": {"readonly": True}, - "created_on": {"readonly": True}, - "modified_on": {"readonly": True}, - "provisioning_errors": {"readonly": True}, - "is_attached_compute": {"readonly": True}, + "job_output_type": {"required": True}, } _attribute_map = { - "properties": {"key": "properties", "type": "KubernetesProperties"}, - "compute_type": {"key": "computeType", "type": "str"}, - "compute_location": {"key": "computeLocation", "type": "str"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, "description": {"key": "description", "type": "str"}, - "created_on": {"key": "createdOn", "type": "iso-8601"}, - "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, - "resource_id": {"key": "resourceId", "type": "str"}, - "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, - "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, - "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, - properties: Optional["_models.KubernetesProperties"] = None, - compute_location: Optional[str] = None, description: Optional[str] = None, - resource_id: Optional[str] = None, - disable_local_auth: Optional[bool] = None, + asset_name: Optional[str] = None, + asset_version: Optional[str] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword properties: Properties of Kubernetes. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. + :keyword description: Description for the output. :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool + :keyword asset_name: Output Asset Name. + :paramtype asset_name: str + :keyword asset_version: Output Asset Version. + :paramtype asset_version: str + :keyword auto_delete_setting: Auto delete setting of output data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. 
+ :paramtype uri: str """ super().__init__( - compute_location=compute_location, + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, description=description, - resource_id=resource_id, - disable_local_auth=disable_local_auth, - properties=properties, **kwargs ) - self.properties = properties - self.compute_type: str = "Kubernetes" - self.compute_location = compute_location - self.provisioning_state = None self.description = description - self.created_on = None - self.modified_on = None - self.resource_id = resource_id - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = disable_local_auth + self.job_output_type: str = "mltable" + self.asset_name = asset_name + self.asset_version = asset_version + self.auto_delete_setting = auto_delete_setting + self.mode = mode + self.uri = uri -class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): # pylint: disable=too-many-instance-attributes - """OnlineDeploymentProperties. +class ModelConfiguration(_serialization.Model): + """Model configuration options. - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - KubernetesOnlineDeployment, ManagedOnlineDeployment + :ivar mode: Input delivery mode for the model. Known values are: "Copy" and "Download". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode + :ivar mount_path: Relative mounting path of the model in the target image. + :vartype mount_path: str + """ + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, + } + + def __init__( + self, + *, + mode: Optional[Union[str, "_models.PackageInputDeliveryMode"]] = None, + mount_path: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword mode: Input delivery mode for the model. Known values are: "Copy" and "Download". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode + :keyword mount_path: Relative mounting path of the model in the target image. + :paramtype mount_path: str + """ + super().__init__(**kwargs) + self.mode = mode + self.mount_path = mount_path + + +class ModelContainer(Resource): + """Azure Resource Manager resource envelope. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar code_configuration: Code configuration for the endpoint deployment. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :ivar description: Description of the endpoint deployment. - :vartype description: str - :ivar environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. - :vartype environment_id: str - :ivar environment_variables: Environment variables configuration for the deployment. - :vartype environment_variables: dict[str, str] - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar app_insights_enabled: If true, enables Application Insights logging. - :vartype app_insights_enabled: bool - :ivar egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and - "Disabled". 
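The MLTable job input/output pair added above can be constructed directly from the models namespace. A minimal, illustrative sketch (the URIs and asset names below are placeholders, not values from this change):

from azure.mgmt.machinelearningservices import models

# MLTableJobInput: uri is required and must be non-empty; mode is an InputDeliveryMode value.
training_input = models.MLTableJobInput(
    uri="azureml://datastores/workspaceblobstore/paths/train/",  # placeholder URI
    mode="ReadOnlyMount",
    description="MLTable pointing at the training data",
)

# MLTableJobOutput: all parameters are optional; job_output_type is fixed to "mltable" by the class.
scored_output = models.MLTableJobOutput(
    uri="azureml://datastores/workspaceblobstore/paths/scored/",  # placeholder URI
    mode="ReadWriteMount",
    asset_name="scored-data",      # placeholder output asset name
    asset_version="1",
)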
- :vartype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known - values are: "Managed", "Kubernetes", and "AzureMLCompute". - :vartype endpoint_compute_type: str or - ~azure.mgmt.machinelearningservices.models.EndpointComputeType - :ivar instance_type: Compute instance type. - :vartype instance_type: str - :ivar liveness_probe: Liveness probe monitors the health of the container regularly. - :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar model: The URI path to the model. - :vartype model: str - :ivar model_mount_path: The path to mount the model in custom container. - :vartype model_mount_path: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: - "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState - :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar request_settings: Request settings for the deployment. - :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :ivar scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties """ _validation = { - "endpoint_compute_type": {"required": True}, - "provisioning_state": {"readonly": True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, - "description": {"key": "description", "type": "str"}, - "environment_id": {"key": "environmentId", "type": "str"}, - "environment_variables": {"key": "environmentVariables", "type": "{str}"}, - "properties": {"key": "properties", "type": "{str}"}, - "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, - "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, - "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, - "instance_type": {"key": "instanceType", "type": "str"}, - "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, - "model": {"key": "model", "type": "str"}, - "model_mount_path": {"key": "modelMountPath", "type": "str"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, - "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, - "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, - "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, - } - - _subtype_map = { - "endpoint_compute_type": {"Kubernetes": "KubernetesOnlineDeployment", "Managed": "ManagedOnlineDeployment"} + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ModelContainerProperties"}, } - def __init__( - self, - *, - code_configuration: Optional["_models.CodeConfiguration"] = None, - description: Optional[str] = None, - environment_id: Optional[str] = None, - environment_variables: Optional[Dict[str, str]] = None, - properties: Optional[Dict[str, str]] = None, - app_insights_enabled: bool = False, - egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, - instance_type: Optional[str] = None, - liveness_probe: Optional["_models.ProbeSettings"] = None, - model: Optional[str] = None, - model_mount_path: Optional[str] = None, - readiness_probe: Optional["_models.ProbeSettings"] = None, - request_settings: Optional["_models.OnlineRequestSettings"] = None, - scale_settings: Optional["_models.OnlineScaleSettings"] = None, - **kwargs: Any - ) -> None: + def __init__(self, *, properties: "_models.ModelContainerProperties", **kwargs: Any) -> None: """ - :keyword code_configuration: Code configuration for the endpoint deployment. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :keyword description: Description of the endpoint deployment. - :paramtype description: str - :keyword environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. - :paramtype environment_id: str - :keyword environment_variables: Environment variables configuration for the deployment. - :paramtype environment_variables: dict[str, str] - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. 
- :paramtype properties: dict[str, str] - :keyword app_insights_enabled: If true, enables Application Insights logging. - :paramtype app_insights_enabled: bool - :keyword egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and - "Disabled". - :paramtype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :keyword instance_type: Compute instance type. - :paramtype instance_type: str - :keyword liveness_probe: Liveness probe monitors the health of the container regularly. - :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword model: The URI path to the model. - :paramtype model: str - :keyword model_mount_path: The path to mount the model in custom container. - :paramtype model_mount_path: str - :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword request_settings: Request settings for the deployment. - :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :keyword scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties """ - super().__init__( - code_configuration=code_configuration, - description=description, - environment_id=environment_id, - environment_variables=environment_variables, - properties=properties, - **kwargs - ) - self.app_insights_enabled = app_insights_enabled - self.egress_public_network_access = egress_public_network_access - self.endpoint_compute_type: Optional[str] = None - self.instance_type = instance_type - self.liveness_probe = liveness_probe - self.model = model - self.model_mount_path = model_mount_path - self.provisioning_state = None - self.readiness_probe = readiness_probe - self.request_settings = request_settings - self.scale_settings = scale_settings - + super().__init__(**kwargs) + self.properties = properties -class KubernetesOnlineDeployment(OnlineDeploymentProperties): # pylint: disable=too-many-instance-attributes - """Properties specific to a KubernetesOnlineDeployment. - Variables are only populated by the server, and will be ignored when sending a request. +class ModelContainerProperties(AssetContainer): + """ModelContainerProperties. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :ivar code_configuration: Code configuration for the endpoint deployment. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :ivar description: Description of the endpoint deployment. + :ivar description: The asset description text. :vartype description: str - :ivar environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. 
- :vartype environment_id: str - :ivar environment_variables: Environment variables configuration for the deployment. - :vartype environment_variables: dict[str, str] - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar app_insights_enabled: If true, enables Application Insights logging. - :vartype app_insights_enabled: bool - :ivar egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and - "Disabled". - :vartype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known - values are: "Managed", "Kubernetes", and "AzureMLCompute". - :vartype endpoint_compute_type: str or - ~azure.mgmt.machinelearningservices.models.EndpointComputeType - :ivar instance_type: Compute instance type. - :vartype instance_type: str - :ivar liveness_probe: Liveness probe monitors the health of the container regularly. - :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar model: The URI path to the model. - :vartype model: str - :ivar model_mount_path: The path to mount the model in custom container. - :vartype model_mount_path: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: - "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + :ivar provisioning_state: Provisioning state for the model container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState - :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar request_settings: Request settings for the deployment. - :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :ivar scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings - :ivar container_resource_requirements: The resource requirements for the container (cpu and - memory). 
- :vartype container_resource_requirements: - ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ _validation = { - "endpoint_compute_type": {"required": True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, "provisioning_state": {"readonly": True}, } _attribute_map = { - "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, "description": {"key": "description", "type": "str"}, - "environment_id": {"key": "environmentId", "type": "str"}, - "environment_variables": {"key": "environmentVariables", "type": "{str}"}, "properties": {"key": "properties", "type": "{str}"}, - "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, - "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, - "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, - "instance_type": {"key": "instanceType", "type": "str"}, - "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, - "model": {"key": "model", "type": "str"}, - "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, "provisioning_state": {"key": "provisioningState", "type": "str"}, - "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, - "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, - "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, - "container_resource_requirements": { - "key": "containerResourceRequirements", - "type": "ContainerResourceRequirements", - }, } def __init__( self, *, - code_configuration: Optional["_models.CodeConfiguration"] = None, description: Optional[str] = None, - environment_id: Optional[str] = None, - environment_variables: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, - app_insights_enabled: bool = False, - egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, - instance_type: Optional[str] = None, - liveness_probe: Optional["_models.ProbeSettings"] = None, - model: Optional[str] = None, - model_mount_path: Optional[str] = None, - readiness_probe: Optional["_models.ProbeSettings"] = None, - request_settings: Optional["_models.OnlineRequestSettings"] = None, - scale_settings: Optional["_models.OnlineScaleSettings"] = None, - container_resource_requirements: Optional["_models.ContainerResourceRequirements"] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: bool = False, **kwargs: Any ) -> None: """ - :keyword code_configuration: Code configuration for the endpoint deployment. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :keyword description: Description of the endpoint deployment. + :keyword description: The asset description text. :paramtype description: str - :keyword environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. - :paramtype environment_id: str - :keyword environment_variables: Environment variables configuration for the deployment. - :paramtype environment_variables: dict[str, str] - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. 
+ :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword app_insights_enabled: If true, enables Application Insights logging. - :paramtype app_insights_enabled: bool - :keyword egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and - "Disabled". - :paramtype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :keyword instance_type: Compute instance type. - :paramtype instance_type: str - :keyword liveness_probe: Liveness probe monitors the health of the container regularly. - :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword model: The URI path to the model. - :paramtype model: str - :keyword model_mount_path: The path to mount the model in custom container. - :paramtype model_mount_path: str - :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword request_settings: Request settings for the deployment. - :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :keyword scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings - :keyword container_resource_requirements: The resource requirements for the container (cpu and - memory). - :paramtype container_resource_requirements: - ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool """ - super().__init__( - code_configuration=code_configuration, - description=description, - environment_id=environment_id, - environment_variables=environment_variables, - properties=properties, - app_insights_enabled=app_insights_enabled, - egress_public_network_access=egress_public_network_access, - instance_type=instance_type, - liveness_probe=liveness_probe, - model=model, - model_mount_path=model_mount_path, - readiness_probe=readiness_probe, - request_settings=request_settings, - scale_settings=scale_settings, - **kwargs - ) - self.endpoint_compute_type: str = "Kubernetes" - self.container_resource_requirements = container_resource_requirements + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + self.provisioning_state = None -class KubernetesProperties(_serialization.Model): - """Kubernetes properties. +class ModelContainerResourceArmPaginatedResult(_serialization.Model): + """A paginated list of ModelContainer entities. - :ivar relay_connection_string: Relay connection string. - :vartype relay_connection_string: str - :ivar service_bus_connection_string: ServiceBus connection string. - :vartype service_bus_connection_string: str - :ivar extension_principal_id: Extension principal-id. - :vartype extension_principal_id: str - :ivar extension_instance_release_train: Extension instance release train. 
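As a quick illustration of how the ARM envelope and its properties model compose, a hypothetical container payload could be built like this (the description and tags are placeholder values, not part of the generated code):

from azure.mgmt.machinelearningservices import models

# ModelContainerProperties holds the mutable asset metadata; latest_version,
# next_version and provisioning_state are read-only and populated by the service.
container = models.ModelContainer(
    properties=models.ModelContainerProperties(
        description="Family of fraud-scoring models",  # placeholder text
        tags={"team": "risk"},
        is_archived=False,
    )
)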
- :vartype extension_instance_release_train: str - :ivar vc_name: VC name. - :vartype vc_name: str - :ivar namespace: Compute namespace. - :vartype namespace: str - :ivar default_instance_type: Default instance type. - :vartype default_instance_type: str - :ivar instance_types: Instance Type Schema. - :vartype instance_types: dict[str, - ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] + :ivar next_link: The link to the next page of ModelContainer objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type ModelContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] """ _attribute_map = { - "relay_connection_string": {"key": "relayConnectionString", "type": "str"}, - "service_bus_connection_string": {"key": "serviceBusConnectionString", "type": "str"}, - "extension_principal_id": {"key": "extensionPrincipalId", "type": "str"}, - "extension_instance_release_train": {"key": "extensionInstanceReleaseTrain", "type": "str"}, - "vc_name": {"key": "vcName", "type": "str"}, - "namespace": {"key": "namespace", "type": "str"}, - "default_instance_type": {"key": "defaultInstanceType", "type": "str"}, - "instance_types": {"key": "instanceTypes", "type": "{InstanceTypeSchema}"}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ModelContainer]"}, } def __init__( - self, - *, - relay_connection_string: Optional[str] = None, - service_bus_connection_string: Optional[str] = None, - extension_principal_id: Optional[str] = None, - extension_instance_release_train: Optional[str] = None, - vc_name: Optional[str] = None, - namespace: str = "default", - default_instance_type: Optional[str] = None, - instance_types: Optional[Dict[str, "_models.InstanceTypeSchema"]] = None, - **kwargs: Any + self, *, next_link: Optional[str] = None, value: Optional[List["_models.ModelContainer"]] = None, **kwargs: Any ) -> None: """ - :keyword relay_connection_string: Relay connection string. - :paramtype relay_connection_string: str - :keyword service_bus_connection_string: ServiceBus connection string. - :paramtype service_bus_connection_string: str - :keyword extension_principal_id: Extension principal-id. - :paramtype extension_principal_id: str - :keyword extension_instance_release_train: Extension instance release train. - :paramtype extension_instance_release_train: str - :keyword vc_name: VC name. - :paramtype vc_name: str - :keyword namespace: Compute namespace. - :paramtype namespace: str - :keyword default_instance_type: Default instance type. - :paramtype default_instance_type: str - :keyword instance_types: Instance Type Schema. - :paramtype instance_types: dict[str, - ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] + :keyword next_link: The link to the next page of ModelContainer objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ModelContainer. 
+ :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] """ super().__init__(**kwargs) - self.relay_connection_string = relay_connection_string - self.service_bus_connection_string = service_bus_connection_string - self.extension_principal_id = extension_principal_id - self.extension_instance_release_train = extension_instance_release_train - self.vc_name = vc_name - self.namespace = namespace - self.default_instance_type = default_instance_type - self.instance_types = instance_types + self.next_link = next_link + self.value = value -class ListAmlUserFeatureResult(_serialization.Model): - """The List Aml user feature operation response. +class ModelPackageInput(_serialization.Model): + """Model package input options. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar value: The list of AML user facing features. - :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlUserFeature] - :ivar next_link: The URI to fetch the next page of AML user features information. Call - ListNext() with this to fetch the next page of AML user features information. - :vartype next_link: str + :ivar input_type: [Required] Type of the input included in the target image. Required. Known + values are: "UriFile" and "UriFolder". + :vartype input_type: str or ~azure.mgmt.machinelearningservices.models.PackageInputType + :ivar mode: Input delivery mode of the input. Known values are: "Copy" and "Download". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode + :ivar mount_path: Relative mount path of the input in the target image. + :vartype mount_path: str + :ivar path: [Required] Location of the input. Required. + :vartype path: ~azure.mgmt.machinelearningservices.models.PackageInputPathBase """ - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, + _validation = { + "input_type": {"required": True}, + "path": {"required": True}, } _attribute_map = { - "value": {"key": "value", "type": "[AmlUserFeature]"}, - "next_link": {"key": "nextLink", "type": "str"}, + "input_type": {"key": "inputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, + "path": {"key": "path", "type": "PackageInputPathBase"}, } - def __init__(self, **kwargs: Any) -> None: - """ """ + def __init__( + self, + *, + input_type: Union[str, "_models.PackageInputType"], + path: "_models.PackageInputPathBase", + mode: Optional[Union[str, "_models.PackageInputDeliveryMode"]] = None, + mount_path: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword input_type: [Required] Type of the input included in the target image. Required. Known + values are: "UriFile" and "UriFolder". + :paramtype input_type: str or ~azure.mgmt.machinelearningservices.models.PackageInputType + :keyword mode: Input delivery mode of the input. Known values are: "Copy" and "Download". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode + :keyword mount_path: Relative mount path of the input in the target image. + :paramtype mount_path: str + :keyword path: [Required] Location of the input. Required. 
+ :paramtype path: ~azure.mgmt.machinelearningservices.models.PackageInputPathBase + """ super().__init__(**kwargs) - self.value = None - self.next_link = None + self.input_type = input_type + self.mode = mode + self.mount_path = mount_path + self.path = path -class ListNotebookKeysResult(_serialization.Model): - """ListNotebookKeysResult. +class ModelPerformanceSignal(MonitoringSignalBase): + """Model performance signal definition. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar primary_access_key: - :vartype primary_access_key: str - :ivar secondary_access_key: - :vartype secondary_access_key: str + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". + :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType + :ivar data_segment: The data segment. + :vartype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment + :ivar metric_threshold: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :vartype metric_threshold: + ~azure.mgmt.machinelearningservices.models.ModelPerformanceMetricThresholdBase + :ivar production_data: [Required] The data produced by the production service which drift will + be calculated for. Required. + :vartype production_data: + list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] + :ivar reference_data: [Required] The data to calculate drift against. Required. + :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ _validation = { - "primary_access_key": {"readonly": True}, - "secondary_access_key": {"readonly": True}, + "signal_type": {"required": True}, + "metric_threshold": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - "primary_access_key": {"key": "primaryAccessKey", "type": "str"}, - "secondary_access_key": {"key": "secondaryAccessKey", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "data_segment": {"key": "dataSegment", "type": "MonitoringDataSegment"}, + "metric_threshold": {"key": "metricThreshold", "type": "ModelPerformanceMetricThresholdBase"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.primary_access_key = None - self.secondary_access_key = None - - -class ListStorageAccountKeysResult(_serialization.Model): - """ListStorageAccountKeysResult. 
+ def __init__( + self, + *, + metric_threshold: "_models.ModelPerformanceMetricThresholdBase", + production_data: List["_models.MonitoringInputDataBase"], + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, + properties: Optional[Dict[str, str]] = None, + data_segment: Optional["_models.MonitoringDataSegment"] = None, + **kwargs: Any + ) -> None: + """ + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword data_segment: The data segment. + :paramtype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment + :keyword metric_threshold: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :paramtype metric_threshold: + ~azure.mgmt.machinelearningservices.models.ModelPerformanceMetricThresholdBase + :keyword production_data: [Required] The data produced by the production service which drift + will be calculated for. Required. + :paramtype production_data: + list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] + :keyword reference_data: [Required] The data to calculate drift against. Required. + :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + """ + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "ModelPerformance" + self.data_segment = data_segment + self.metric_threshold = metric_threshold + self.production_data = production_data + self.reference_data = reference_data + + +class ModelProfile(_serialization.Model): + """ModelProfile. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar user_storage_key: - :vartype user_storage_key: str + :ivar model_uri: [Required] The model to create a serverless endpoint of. Required. + :vartype model_uri: str """ _validation = { - "user_storage_key": {"readonly": True}, + "model_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - "user_storage_key": {"key": "userStorageKey", "type": "str"}, + "model_uri": {"key": "modelUri", "type": "str"}, } - def __init__(self, **kwargs: Any) -> None: - """ """ + def __init__(self, *, model_uri: str, **kwargs: Any) -> None: + """ + :keyword model_uri: [Required] The model to create a serverless endpoint of. Required. + :paramtype model_uri: str + """ super().__init__(**kwargs) - self.user_storage_key = None + self.model_uri = model_uri -class ListUsagesResult(_serialization.Model): - """The List Usages operation response. +class ModelVersion(Resource): + """Azure Resource Manager resource envelope. Variables are only populated by the server, and will be ignored when sending a request. - :ivar value: The list of AML resource usages. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Usage] - :ivar next_link: The URI to fetch the next page of AML resource usage information. Call - ListNext() with this to fetch the next page of AML resource usage information. - :vartype next_link: str + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties """ _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - "value": {"key": "value", "type": "[Usage]"}, - "next_link": {"key": "nextLink", "type": "str"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ModelVersionProperties"}, } - def __init__(self, **kwargs: Any) -> None: - """ """ + def __init__(self, *, properties: "_models.ModelVersionProperties", **kwargs: Any) -> None: + """ + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties + """ super().__init__(**kwargs) - self.value = None - self.next_link = None + self.properties = properties -class ListWorkspaceKeysResult(_serialization.Model): - """ListWorkspaceKeysResult. +class ModelVersionProperties(AssetBase): # pylint: disable=too-many-instance-attributes + """Model asset version details. Variables are only populated by the server, and will be ignored when sending a request. - :ivar user_storage_key: - :vartype user_storage_key: str - :ivar user_storage_resource_id: - :vartype user_storage_resource_id: str - :ivar app_insights_instrumentation_key: - :vartype app_insights_instrumentation_key: str - :ivar container_registry_credentials: - :vartype container_registry_credentials: - ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult - :ivar notebook_access_keys: - :vartype notebook_access_keys: - ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :vartype is_archived: bool + :ivar flavors: Mapping of model flavors to their properties. 
+ :vartype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] + :ivar intellectual_property: Intellectual Property details. Used if model is an Intellectual + Property. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :ivar job_name: Name of the training job which produced this model. + :vartype job_name: str + :ivar model_type: The storage format for this entity. Used for NCD. + :vartype model_type: str + :ivar model_uri: The URI path to the model contents. + :vartype model_uri: str + :ivar provisioning_state: Provisioning state for the model version. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + :ivar stage: Stage in the model lifecycle assigned to this model. + :vartype stage: str """ _validation = { - "user_storage_key": {"readonly": True}, - "user_storage_resource_id": {"readonly": True}, - "app_insights_instrumentation_key": {"readonly": True}, - "container_registry_credentials": {"readonly": True}, - "notebook_access_keys": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - "user_storage_key": {"key": "userStorageKey", "type": "str"}, - "user_storage_resource_id": {"key": "userStorageResourceId", "type": "str"}, - "app_insights_instrumentation_key": {"key": "appInsightsInstrumentationKey", "type": "str"}, - "container_registry_credentials": { - "key": "containerRegistryCredentials", - "type": "RegistryListCredentialsResult", - }, - "notebook_access_keys": {"key": "notebookAccessKeys", "type": "ListNotebookKeysResult"}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "flavors": {"key": "flavors", "type": "{FlavorData}"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "job_name": {"key": "jobName", "type": "str"}, + "model_type": {"key": "modelType", "type": "str"}, + "model_uri": {"key": "modelUri", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.user_storage_key = None - self.user_storage_resource_id = None - self.app_insights_instrumentation_key = None - self.container_registry_credentials = None - self.notebook_access_keys = None - + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + flavors: Optional[Dict[str, "_models.FlavorData"]] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + job_name: Optional[str] = None, + model_type: Optional[str] = None, + model_uri: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. 
+ :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. + :paramtype is_archived: bool + :keyword flavors: Mapping of model flavors to their properties. + :paramtype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] + :keyword intellectual_property: Intellectual Property details. Used if model is an Intellectual + Property. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword job_name: Name of the training job which produced this model. + :paramtype job_name: str + :keyword model_type: The storage format for this entity. Used for NCD. + :paramtype model_type: str + :keyword model_uri: The URI path to the model contents. + :paramtype model_uri: str + :keyword stage: Stage in the model lifecycle assigned to this model. + :paramtype stage: str + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) + self.flavors = flavors + self.intellectual_property = intellectual_property + self.job_name = job_name + self.model_type = model_type + self.model_uri = model_uri + self.provisioning_state = None + self.stage = stage -class ListWorkspaceQuotas(_serialization.Model): - """The List WorkspaceQuotasByVMFamily operation response. - Variables are only populated by the server, and will be ignored when sending a request. +class ModelVersionResourceArmPaginatedResult(_serialization.Model): + """A paginated list of ModelVersion entities. - :ivar value: The list of Workspace Quotas by VM Family. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ResourceQuota] - :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. - Call ListNext() with this to fetch the next page of Workspace Quota information. + :ivar next_link: The link to the next page of ModelVersion objects. If null, there are no + additional pages. :vartype next_link: str + :ivar value: An array of objects of type ModelVersion. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] """ - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - _attribute_map = { - "value": {"key": "value", "type": "[ResourceQuota]"}, "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ModelVersion]"}, } - def __init__(self, **kwargs: Any) -> None: - """ """ + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.ModelVersion"]] = None, **kwargs: Any + ) -> None: + """ + :keyword next_link: The link to the next page of ModelVersion objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ModelVersion. 
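A similar sketch for the model version models above (the URI, job name, and tag values are placeholders):

from azure.mgmt.machinelearningservices import models

# ModelVersionProperties describes one concrete model artifact; provisioning_state is read-only.
version = models.ModelVersion(
    properties=models.ModelVersionProperties(
        model_type="mlflow_model",      # free-form storage format string
        model_uri="azureml://jobs/sample-job/outputs/artifacts/model",  # placeholder URI
        job_name="sample-job",          # placeholder training job name
        stage="Development",
        tags={"framework": "sklearn"},
    )
)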
+ :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] + """ super().__init__(**kwargs) - self.value = None - self.next_link = None + self.next_link = next_link + self.value = value -class LiteralJobInput(JobInput): - """Literal input type. +class MonitorComputeConfigurationBase(_serialization.Model): + """Monitor compute configuration base definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MonitorServerlessSparkCompute All required parameters must be populated in order to send to Azure. - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and - "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar value: [Required] Literal value for the input. Required. - :vartype value: str + :ivar compute_type: [Required] Specifies the type of signal to monitor. Required. + "ServerlessSpark" + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.MonitorComputeType """ _validation = { - "job_input_type": {"required": True}, - "value": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "compute_type": {"required": True}, } _attribute_map = { - "description": {"key": "description", "type": "str"}, - "job_input_type": {"key": "jobInputType", "type": "str"}, - "value": {"key": "value", "type": "str"}, + "compute_type": {"key": "computeType", "type": "str"}, } - def __init__(self, *, value: str, description: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword description: Description for the input. - :paramtype description: str - :keyword value: [Required] Literal value for the input. Required. - :paramtype value: str - """ - super().__init__(description=description, **kwargs) - self.job_input_type: str = "literal" - self.value = value + _subtype_map = {"compute_type": {"ServerlessSpark": "MonitorServerlessSparkCompute"}} + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.compute_type: Optional[str] = None -class ManagedIdentity(IdentityConfiguration): - """Managed identity configuration. + +class MonitorDefinition(_serialization.Model): + """MonitorDefinition. All required parameters must be populated in order to send to Azure. - :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known - values are: "Managed", "AMLToken", and "UserIdentity". - :vartype identity_type: str or - ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType - :ivar client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not - set this field. - :vartype client_id: str - :ivar object_id: Specifies a user-assigned identity by object ID. For system-assigned, do not - set this field. - :vartype object_id: str - :ivar resource_id: Specifies a user-assigned identity by ARM resource ID. For system-assigned, - do not set this field. - :vartype resource_id: str + :ivar alert_notification_setting: The monitor's notification settings. + :vartype alert_notification_setting: + ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationSettingsBase + :ivar compute_configuration: [Required] The ARM resource ID of the compute resource to run the + monitoring job on. Required. 
+ :vartype compute_configuration: + ~azure.mgmt.machinelearningservices.models.MonitorComputeConfigurationBase + :ivar monitoring_target: The ARM resource ID of either the model or deployment targeted by this + monitor. + :vartype monitoring_target: ~azure.mgmt.machinelearningservices.models.MonitoringTarget + :ivar signals: [Required] The signals to monitor. Required. + :vartype signals: dict[str, ~azure.mgmt.machinelearningservices.models.MonitoringSignalBase] """ _validation = { - "identity_type": {"required": True}, + "compute_configuration": {"required": True}, + "signals": {"required": True}, } _attribute_map = { - "identity_type": {"key": "identityType", "type": "str"}, - "client_id": {"key": "clientId", "type": "str"}, - "object_id": {"key": "objectId", "type": "str"}, - "resource_id": {"key": "resourceId", "type": "str"}, + "alert_notification_setting": { + "key": "alertNotificationSetting", + "type": "MonitoringAlertNotificationSettingsBase", + }, + "compute_configuration": {"key": "computeConfiguration", "type": "MonitorComputeConfigurationBase"}, + "monitoring_target": {"key": "monitoringTarget", "type": "MonitoringTarget"}, + "signals": {"key": "signals", "type": "{MonitoringSignalBase}"}, } def __init__( self, *, - client_id: Optional[str] = None, - object_id: Optional[str] = None, - resource_id: Optional[str] = None, + compute_configuration: "_models.MonitorComputeConfigurationBase", + signals: Dict[str, "_models.MonitoringSignalBase"], + alert_notification_setting: Optional["_models.MonitoringAlertNotificationSettingsBase"] = None, + monitoring_target: Optional["_models.MonitoringTarget"] = None, **kwargs: Any ) -> None: """ - :keyword client_id: Specifies a user-assigned identity by client ID. For system-assigned, do - not set this field. - :paramtype client_id: str - :keyword object_id: Specifies a user-assigned identity by object ID. For system-assigned, do - not set this field. - :paramtype object_id: str - :keyword resource_id: Specifies a user-assigned identity by ARM resource ID. For - system-assigned, do not set this field. - :paramtype resource_id: str + :keyword alert_notification_setting: The monitor's notification settings. + :paramtype alert_notification_setting: + ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationSettingsBase + :keyword compute_configuration: [Required] The ARM resource ID of the compute resource to run + the monitoring job on. Required. + :paramtype compute_configuration: + ~azure.mgmt.machinelearningservices.models.MonitorComputeConfigurationBase + :keyword monitoring_target: The ARM resource ID of either the model or deployment targeted by + this monitor. + :paramtype monitoring_target: ~azure.mgmt.machinelearningservices.models.MonitoringTarget + :keyword signals: [Required] The signals to monitor. Required. + :paramtype signals: dict[str, ~azure.mgmt.machinelearningservices.models.MonitoringSignalBase] """ super().__init__(**kwargs) - self.identity_type: str = "Managed" - self.client_id = client_id - self.object_id = object_id - self.resource_id = resource_id - - -class WorkspaceConnectionPropertiesV2(_serialization.Model): - """WorkspaceConnectionPropertiesV2. + self.alert_notification_setting = alert_notification_setting + self.compute_configuration = compute_configuration + self.monitoring_target = monitoring_target + self.signals = signals - You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - ManagedIdentityAuthTypeWorkspaceConnectionProperties, - NoneAuthTypeWorkspaceConnectionProperties, PATAuthTypeWorkspaceConnectionProperties, - SASAuthTypeWorkspaceConnectionProperties, UsernamePasswordAuthTypeWorkspaceConnectionProperties - All required parameters must be populated in order to send to Azure. +class MonitoringDataSegment(_serialization.Model): + """MonitoringDataSegment. - :ivar auth_type: Authentication type of the connection target. Required. Known values are: - "PAT", "ManagedIdentity", "UsernamePassword", "None", and "SAS". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar target: - :vartype target: str - :ivar value: Value details of the workspace connection. - :vartype value: str - :ivar value_format: format for the workspace connection value. "JSON" - :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar feature: The feature to segment the data on. + :vartype feature: str + :ivar values: Filters for only the specified values of the given segmented feature. + :vartype values: list[str] """ - _validation = { - "auth_type": {"required": True}, - } - _attribute_map = { - "auth_type": {"key": "authType", "type": "str"}, - "category": {"key": "category", "type": "str"}, - "target": {"key": "target", "type": "str"}, - "value": {"key": "value", "type": "str"}, - "value_format": {"key": "valueFormat", "type": "str"}, - } - - _subtype_map = { - "auth_type": { - "ManagedIdentity": "ManagedIdentityAuthTypeWorkspaceConnectionProperties", - "None": "NoneAuthTypeWorkspaceConnectionProperties", - "PAT": "PATAuthTypeWorkspaceConnectionProperties", - "SAS": "SASAuthTypeWorkspaceConnectionProperties", - "UsernamePassword": "UsernamePasswordAuthTypeWorkspaceConnectionProperties", - } + "feature": {"key": "feature", "type": "str"}, + "values": {"key": "values", "type": "[str]"}, } - def __init__( - self, - *, - category: Optional[Union[str, "_models.ConnectionCategory"]] = None, - target: Optional[str] = None, - value: Optional[str] = None, - value_format: Optional[Union[str, "_models.ValueFormat"]] = None, - **kwargs: Any - ) -> None: + def __init__(self, *, feature: Optional[str] = None, values: Optional[List[str]] = None, **kwargs: Any) -> None: """ - :keyword category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword target: - :paramtype target: str - :keyword value: Value details of the workspace connection. - :paramtype value: str - :keyword value_format: format for the workspace connection value. "JSON" - :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword feature: The feature to segment the data on. + :paramtype feature: str + :keyword values: Filters for only the specified values of the given segmented feature. 
+ :paramtype values: list[str] """ super().__init__(**kwargs) - self.auth_type: Optional[str] = None - self.category = category - self.target = target - self.value = value - self.value_format = value_format + self.feature = feature + self.values = values -class ManagedIdentityAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """ManagedIdentityAuthTypeWorkspaceConnectionProperties. +class MonitoringTarget(_serialization.Model): + """Monitoring target definition. All required parameters must be populated in order to send to Azure. - :ivar auth_type: Authentication type of the connection target. Required. Known values are: - "PAT", "ManagedIdentity", "UsernamePassword", "None", and "SAS". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar target: - :vartype target: str - :ivar value: Value details of the workspace connection. - :vartype value: str - :ivar value_format: format for the workspace connection value. "JSON" - :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat - :ivar credentials: - :vartype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity + :ivar deployment_id: The ARM resource ID of either the deployment targeted by this monitor. + :vartype deployment_id: str + :ivar model_id: The ARM resource ID of either the model targeted by this monitor. + :vartype model_id: str + :ivar task_type: [Required] The machine learning task type of the model. Required. Known values + are: "Classification", "Regression", and "QuestionAnswering". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.ModelTaskType """ _validation = { - "auth_type": {"required": True}, + "task_type": {"required": True}, } _attribute_map = { - "auth_type": {"key": "authType", "type": "str"}, - "category": {"key": "category", "type": "str"}, - "target": {"key": "target", "type": "str"}, - "value": {"key": "value", "type": "str"}, - "value_format": {"key": "valueFormat", "type": "str"}, - "credentials": {"key": "credentials", "type": "WorkspaceConnectionManagedIdentity"}, + "deployment_id": {"key": "deploymentId", "type": "str"}, + "model_id": {"key": "modelId", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, } def __init__( self, *, - category: Optional[Union[str, "_models.ConnectionCategory"]] = None, - target: Optional[str] = None, - value: Optional[str] = None, - value_format: Optional[Union[str, "_models.ValueFormat"]] = None, - credentials: Optional["_models.WorkspaceConnectionManagedIdentity"] = None, + task_type: Union[str, "_models.ModelTaskType"], + deployment_id: Optional[str] = None, + model_id: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword target: - :paramtype target: str - :keyword value: Value details of the workspace connection. - :paramtype value: str - :keyword value_format: format for the workspace connection value. 
"JSON" - :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat - :keyword credentials: - :paramtype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity + :keyword deployment_id: The ARM resource ID of either the deployment targeted by this monitor. + :paramtype deployment_id: str + :keyword model_id: The ARM resource ID of either the model targeted by this monitor. + :paramtype model_id: str + :keyword task_type: [Required] The machine learning task type of the model. Required. Known + values are: "Classification", "Regression", and "QuestionAnswering". + :paramtype task_type: str or ~azure.mgmt.machinelearningservices.models.ModelTaskType """ - super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) - self.auth_type: str = "ManagedIdentity" - self.credentials = credentials + super().__init__(**kwargs) + self.deployment_id = deployment_id + self.model_id = model_id + self.task_type = task_type -class ManagedOnlineDeployment(OnlineDeploymentProperties): # pylint: disable=too-many-instance-attributes - """Properties specific to a ManagedOnlineDeployment. +class MonitoringThreshold(_serialization.Model): + """MonitoringThreshold. - Variables are only populated by the server, and will be ignored when sending a request. + :ivar value: The threshold value. If null, the set default is dependent on the metric type. + :vartype value: float + """ - All required parameters must be populated in order to send to Azure. + _attribute_map = { + "value": {"key": "value", "type": "float"}, + } - :ivar code_configuration: Code configuration for the endpoint deployment. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :ivar description: Description of the endpoint deployment. - :vartype description: str - :ivar environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. - :vartype environment_id: str - :ivar environment_variables: Environment variables configuration for the deployment. + def __init__(self, *, value: Optional[float] = None, **kwargs: Any) -> None: + """ + :keyword value: The threshold value. If null, the set default is dependent on the metric type. + :paramtype value: float + """ + super().__init__(**kwargs) + self.value = value + + +class MonitoringWorkspaceConnection(_serialization.Model): + """Monitoring workspace connection definition. + + :ivar environment_variables: The properties of a workspace service connection to store as + environment variables in the submitted jobs. + Key is workspace connection property path, name is environment variable key. :vartype environment_variables: dict[str, str] - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar app_insights_enabled: If true, enables Application Insights logging. - :vartype app_insights_enabled: bool - :ivar egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and - "Disabled". - :vartype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known - values are: "Managed", "Kubernetes", and "AzureMLCompute". 
- :vartype endpoint_compute_type: str or - ~azure.mgmt.machinelearningservices.models.EndpointComputeType - :ivar instance_type: Compute instance type. - :vartype instance_type: str - :ivar liveness_probe: Liveness probe monitors the health of the container regularly. - :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar model: The URI path to the model. - :vartype model: str - :ivar model_mount_path: The path to mount the model in custom container. - :vartype model_mount_path: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: - "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState - :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar request_settings: Request settings for the deployment. - :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :ivar scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :ivar secrets: The properties of a workspace service connection to store as secrets in the + submitted jobs. + Key is workspace connection property path, name is secret key. + :vartype secrets: dict[str, str] """ - _validation = { - "endpoint_compute_type": {"required": True}, - "provisioning_state": {"readonly": True}, - } - _attribute_map = { - "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, - "description": {"key": "description", "type": "str"}, - "environment_id": {"key": "environmentId", "type": "str"}, - "environment_variables": {"key": "environmentVariables", "type": "{str}"}, - "properties": {"key": "properties", "type": "{str}"}, - "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, - "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, - "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, - "instance_type": {"key": "instanceType", "type": "str"}, - "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, - "model": {"key": "model", "type": "str"}, - "model_mount_path": {"key": "modelMountPath", "type": "str"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, - "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, - "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, - "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, - } - - def __init__( - self, - *, - code_configuration: Optional["_models.CodeConfiguration"] = None, - description: Optional[str] = None, - environment_id: Optional[str] = None, - environment_variables: Optional[Dict[str, str]] = None, - properties: Optional[Dict[str, str]] = None, - app_insights_enabled: bool = False, - egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, - instance_type: Optional[str] = None, - liveness_probe: Optional["_models.ProbeSettings"] = None, - model: Optional[str] = None, - 
model_mount_path: Optional[str] = None, - readiness_probe: Optional["_models.ProbeSettings"] = None, - request_settings: Optional["_models.OnlineRequestSettings"] = None, - scale_settings: Optional["_models.OnlineScaleSettings"] = None, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "secrets": {"key": "secrets", "type": "{str}"}, + } + + def __init__( + self, + *, + environment_variables: Optional[Dict[str, str]] = None, + secrets: Optional[Dict[str, str]] = None, **kwargs: Any ) -> None: """ - :keyword code_configuration: Code configuration for the endpoint deployment. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :keyword description: Description of the endpoint deployment. - :paramtype description: str - :keyword environment_id: ARM resource ID or AssetId of the environment specification for the - endpoint deployment. - :paramtype environment_id: str - :keyword environment_variables: Environment variables configuration for the deployment. + :keyword environment_variables: The properties of a workspace service connection to store as + environment variables in the submitted jobs. + Key is workspace connection property path, name is environment variable key. :paramtype environment_variables: dict[str, str] - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword app_insights_enabled: If true, enables Application Insights logging. - :paramtype app_insights_enabled: bool - :keyword egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and - "Disabled". - :paramtype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :keyword instance_type: Compute instance type. - :paramtype instance_type: str - :keyword liveness_probe: Liveness probe monitors the health of the container regularly. - :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword model: The URI path to the model. - :paramtype model: str - :keyword model_mount_path: The path to mount the model in custom container. - :paramtype model_mount_path: str - :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword request_settings: Request settings for the deployment. - :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :keyword scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :keyword secrets: The properties of a workspace service connection to store as secrets in the + submitted jobs. + Key is workspace connection property path, name is secret key. 
+ :paramtype secrets: dict[str, str] """ - super().__init__( - code_configuration=code_configuration, - description=description, - environment_id=environment_id, - environment_variables=environment_variables, - properties=properties, - app_insights_enabled=app_insights_enabled, - egress_public_network_access=egress_public_network_access, - instance_type=instance_type, - liveness_probe=liveness_probe, - model=model, - model_mount_path=model_mount_path, - readiness_probe=readiness_probe, - request_settings=request_settings, - scale_settings=scale_settings, - **kwargs - ) - self.endpoint_compute_type: str = "Managed" - + super().__init__(**kwargs) + self.environment_variables = environment_variables + self.secrets = secrets -class ManagedServiceIdentity(_serialization.Model): - """Managed service identity (system assigned and/or user assigned identities). - Variables are only populated by the server, and will be ignored when sending a request. +class MonitorServerlessSparkCompute(MonitorComputeConfigurationBase): + """Monitor serverless spark compute definition. All required parameters must be populated in order to send to Azure. - :ivar principal_id: The service principal ID of the system assigned identity. This property - will only be provided for a system assigned identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be - provided for a system assigned identity. - :vartype tenant_id: str - :ivar type: Type of managed service identity (where both SystemAssigned and UserAssigned types - are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and - "SystemAssigned,UserAssigned". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType - :ivar user_assigned_identities: The set of user assigned identities associated with the - resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. - The dictionary values can be empty objects ({}) in requests. - :vartype user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] + :ivar compute_type: [Required] Specifies the type of signal to monitor. Required. + "ServerlessSpark" + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.MonitorComputeType + :ivar compute_identity: [Required] The identity scheme leveraged to by the spark jobs running + on serverless Spark. Required. + :vartype compute_identity: + ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityBase + :ivar instance_type: [Required] The instance type running the Spark job. Required. + :vartype instance_type: str + :ivar runtime_version: [Required] The Spark runtime version. Required. 
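A hedged sketch of the new MonitoringWorkspaceConnection model; the property paths and names below are illustrative only, with the key/name convention following the docstring above.

from azure.mgmt.machinelearningservices import models

# Surface workspace connection properties to the submitted monitoring jobs,
# either as plain environment variables or as secrets (values are placeholders).
connection = models.MonitoringWorkspaceConnection(
    environment_variables={"MY_CONNECTION_TARGET": "target"},
    secrets={"MY_CONNECTION_KEY": "credentials/key"},
)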
+ :vartype runtime_version: str """ _validation = { - "principal_id": {"readonly": True}, - "tenant_id": {"readonly": True}, - "type": {"required": True}, + "compute_type": {"required": True}, + "compute_identity": {"required": True}, + "instance_type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "runtime_version": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - "principal_id": {"key": "principalId", "type": "str"}, - "tenant_id": {"key": "tenantId", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_identity": {"key": "computeIdentity", "type": "MonitorComputeIdentityBase"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "runtime_version": {"key": "runtimeVersion", "type": "str"}, } def __init__( self, *, - type: Union[str, "_models.ManagedServiceIdentityType"], - user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, + compute_identity: "_models.MonitorComputeIdentityBase", + instance_type: str, + runtime_version: str, **kwargs: Any ) -> None: """ - :keyword type: Type of managed service identity (where both SystemAssigned and UserAssigned - types are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and - "SystemAssigned,UserAssigned". - :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType - :keyword user_assigned_identities: The set of user assigned identities associated with the - resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. - The dictionary values can be empty objects ({}) in requests. - :paramtype user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] + :keyword compute_identity: [Required] The identity scheme leveraged to by the spark jobs + running on serverless Spark. Required. + :paramtype compute_identity: + ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityBase + :keyword instance_type: [Required] The instance type running the Spark job. Required. + :paramtype instance_type: str + :keyword runtime_version: [Required] The Spark runtime version. Required. + :paramtype runtime_version: str """ super().__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - self.type = type - self.user_assigned_identities = user_assigned_identities + self.compute_type: str = "ServerlessSpark" + self.compute_identity = compute_identity + self.instance_type = instance_type + self.runtime_version = runtime_version -class MedianStoppingPolicy(EarlyTerminationPolicy): - """Defines an early termination policy based on running averages of the primary metric of all - runs. +class Mpi(DistributionConfiguration): + """MPI distribution configuration. All required parameters must be populated in order to send to Azure. - :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. - :vartype delay_evaluation: int - :ivar evaluation_interval: Interval (number of runs) between policy evaluations. - :vartype evaluation_interval: int - :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: - "Bandit", "MedianStopping", and "TruncationSelection". 
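A sketch of the new serverless Spark compute configuration. AmlTokenComputeIdentity is assumed to be a concrete MonitorComputeIdentityBase subtype in this API version, and the instance type and runtime version are placeholders.

from azure.mgmt.machinelearningservices import models

# Serverless Spark compute for the monitor; this object is what feeds
# MonitorDefinition.compute_configuration.
spark_compute = models.MonitorServerlessSparkCompute(
    compute_identity=models.AmlTokenComputeIdentity(),  # assumed subtype name
    instance_type="standard_e4s_v3",  # placeholder instance type
    runtime_version="3.3",            # placeholder Spark runtime version
)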
- :vartype policy_type: str or - ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType + :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. + Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray". + :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType + :ivar process_count_per_instance: Number of processes per MPI node. + :vartype process_count_per_instance: int """ _validation = { - "policy_type": {"required": True}, + "distribution_type": {"required": True}, } _attribute_map = { - "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, - "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, - "policy_type": {"key": "policyType", "type": "str"}, + "distribution_type": {"key": "distributionType", "type": "str"}, + "process_count_per_instance": {"key": "processCountPerInstance", "type": "int"}, } - def __init__(self, *, delay_evaluation: int = 0, evaluation_interval: int = 0, **kwargs: Any) -> None: + def __init__(self, *, process_count_per_instance: Optional[int] = None, **kwargs: Any) -> None: """ - :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. - :paramtype delay_evaluation: int - :keyword evaluation_interval: Interval (number of runs) between policy evaluations. - :paramtype evaluation_interval: int + :keyword process_count_per_instance: Number of processes per MPI node. + :paramtype process_count_per_instance: int """ - super().__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) - self.policy_type: str = "MedianStopping" + super().__init__(**kwargs) + self.distribution_type: str = "Mpi" + self.process_count_per_instance = process_count_per_instance -class MLFlowModelJobInput(AssetJobInput, JobInput): - """MLFlowModelJobInput. +class NlpFixedParameters(_serialization.Model): + """Fixed training parameters that won't be swept over during AutoML NLP training. - All required parameters must be populated in order to send to Azure. + :ivar gradient_accumulation_steps: Number of steps to accumulate gradients over before running + a backward pass. + :vartype gradient_accumulation_steps: int + :ivar learning_rate: The learning rate for the training procedure. + :vartype learning_rate: float + :ivar learning_rate_scheduler: The type of learning rate schedule to use during the training + procedure. Known values are: "None", "Linear", "Cosine", "CosineWithRestarts", "Polynomial", + "Constant", and "ConstantWithWarmup". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.NlpLearningRateScheduler + :ivar model_name: The name of the model to train. + :vartype model_name: str + :ivar number_of_epochs: Number of training epochs. + :vartype number_of_epochs: int + :ivar training_batch_size: The batch size for the training procedure. + :vartype training_batch_size: int + :ivar validation_batch_size: The batch size to be used during evaluation. + :vartype validation_batch_size: int + :ivar warmup_ratio: The warmup ratio, used alongside LrSchedulerType. + :vartype warmup_ratio: float + :ivar weight_decay: The weight decay for the training procedure. + :vartype weight_decay: float + """ - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and - "triton_model". 
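The MPI distribution model is a one-field discriminated type; a quick sketch:

from azure.mgmt.machinelearningservices import models

# distribution_type is pinned to "Mpi" by the constructor; only the process
# count per MPI node is configurable.
distribution = models.Mpi(process_count_per_instance=4)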
- :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", - "Download", "Direct", "EvalMount", and "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: [Required] Input Asset URI. Required. - :vartype uri: str + _attribute_map = { + "gradient_accumulation_steps": {"key": "gradientAccumulationSteps", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_ratio": {"key": "warmupRatio", "type": "float"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + } + + def __init__( + self, + *, + gradient_accumulation_steps: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.NlpLearningRateScheduler"]] = None, + model_name: Optional[str] = None, + number_of_epochs: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_ratio: Optional[float] = None, + weight_decay: Optional[float] = None, + **kwargs: Any + ) -> None: + """ + :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before + running a backward pass. + :paramtype gradient_accumulation_steps: int + :keyword learning_rate: The learning rate for the training procedure. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: The type of learning rate schedule to use during the training + procedure. Known values are: "None", "Linear", "Cosine", "CosineWithRestarts", "Polynomial", + "Constant", and "ConstantWithWarmup". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.NlpLearningRateScheduler + :keyword model_name: The name of the model to train. + :paramtype model_name: str + :keyword number_of_epochs: Number of training epochs. + :paramtype number_of_epochs: int + :keyword training_batch_size: The batch size for the training procedure. + :paramtype training_batch_size: int + :keyword validation_batch_size: The batch size to be used during evaluation. + :paramtype validation_batch_size: int + :keyword warmup_ratio: The warmup ratio, used alongside LrSchedulerType. + :paramtype warmup_ratio: float + :keyword weight_decay: The weight decay for the training procedure. + :paramtype weight_decay: float + """ + super().__init__(**kwargs) + self.gradient_accumulation_steps = gradient_accumulation_steps + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.number_of_epochs = number_of_epochs + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_ratio = warmup_ratio + self.weight_decay = weight_decay + + +class NlpParameterSubspace(_serialization.Model): + """Stringified search spaces for each parameter. See below examples. + + :ivar gradient_accumulation_steps: Number of steps to accumulate gradients over before running + a backward pass. 
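A short sketch of NlpFixedParameters; the model name and hyperparameter values are illustrative, not defaults defined by this change.

from azure.mgmt.machinelearningservices import models

fixed = models.NlpFixedParameters(
    model_name="bert-base-cased",  # placeholder model name
    learning_rate=2e-5,
    number_of_epochs=3,
    training_batch_size=32,
    learning_rate_scheduler="Linear",  # one of the known scheduler values
)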
+ :vartype gradient_accumulation_steps: str + :ivar learning_rate: The learning rate for the training procedure. + :vartype learning_rate: str + :ivar learning_rate_scheduler: The type of learning rate schedule to use during the training + procedure. + :vartype learning_rate_scheduler: str + :ivar model_name: The name of the model to train. + :vartype model_name: str + :ivar number_of_epochs: Number of training epochs. + :vartype number_of_epochs: str + :ivar training_batch_size: The batch size for the training procedure. + :vartype training_batch_size: str + :ivar validation_batch_size: The batch size to be used during evaluation. + :vartype validation_batch_size: str + :ivar warmup_ratio: The warmup ratio, used alongside LrSchedulerType. + :vartype warmup_ratio: str + :ivar weight_decay: The weight decay for the training procedure. + :vartype weight_decay: str + """ + + _attribute_map = { + "gradient_accumulation_steps": {"key": "gradientAccumulationSteps", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_ratio": {"key": "warmupRatio", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + } + + def __init__( + self, + *, + gradient_accumulation_steps: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + number_of_epochs: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_ratio: Optional[str] = None, + weight_decay: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before + running a backward pass. + :paramtype gradient_accumulation_steps: str + :keyword learning_rate: The learning rate for the training procedure. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: The type of learning rate schedule to use during the training + procedure. + :paramtype learning_rate_scheduler: str + :keyword model_name: The name of the model to train. + :paramtype model_name: str + :keyword number_of_epochs: Number of training epochs. + :paramtype number_of_epochs: str + :keyword training_batch_size: The batch size for the training procedure. + :paramtype training_batch_size: str + :keyword validation_batch_size: The batch size to be used during evaluation. + :paramtype validation_batch_size: str + :keyword warmup_ratio: The warmup ratio, used alongside LrSchedulerType. + :paramtype warmup_ratio: str + :keyword weight_decay: The weight decay for the training procedure. + :paramtype weight_decay: str + """ + super().__init__(**kwargs) + self.gradient_accumulation_steps = gradient_accumulation_steps + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.number_of_epochs = number_of_epochs + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_ratio = warmup_ratio + self.weight_decay = weight_decay + + +class NlpSweepSettings(_serialization.Model): + """Model sweeping and hyperparameter tuning related settings. 
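The NlpParameterSubspace docstring says "See below examples" but no examples were generated; a hedged example of the stringified search-space format follows. The choice()/uniform() expression syntax is the AutoML sweep convention and is assumed here rather than defined by this change.

from azure.mgmt.machinelearningservices import models

subspace = models.NlpParameterSubspace(
    model_name="choice('bert-base-cased','roberta-base')",
    learning_rate="uniform(0.00001,0.00005)",
    number_of_epochs="choice(3,4)",
)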
+ + All required parameters must be populated in order to send to Azure. + + :ivar early_termination: Type of early termination policy for the sweeping job. + :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :ivar sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". + :vartype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ _validation = { - "job_input_type": {"required": True}, - "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "sampling_algorithm": {"required": True}, } _attribute_map = { - "description": {"key": "description", "type": "str"}, - "job_input_type": {"key": "jobInputType", "type": "str"}, - "mode": {"key": "mode", "type": "str"}, - "uri": {"key": "uri", "type": "str"}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, } def __init__( self, *, - uri: str, - description: Optional[str] = None, - mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, **kwargs: Any ) -> None: """ - :keyword description: Description for the input. - :paramtype description: str - :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", - "Download", "Direct", "EvalMount", and "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: [Required] Input Asset URI. Required. - :paramtype uri: str + :keyword early_termination: Type of early termination policy for the sweeping job. + :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :keyword sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". + :paramtype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ - super().__init__(mode=mode, uri=uri, description=description, **kwargs) - self.description = description - self.job_input_type: str = "mlflow_model" - self.mode = mode - self.uri = uri - + super().__init__(**kwargs) + self.early_termination = early_termination + self.sampling_algorithm = sampling_algorithm -class MLFlowModelJobOutput(AssetJobOutput, JobOutput): - """MLFlowModelJobOutput. - All required parameters must be populated in order to send to Azure. +class NlpVertical(_serialization.Model): + """Abstract class for NLP related AutoML tasks. + NLP - Natural Language Processing. - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: - "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :ivar uri: Output Asset URI. - :vartype uri: str + :ivar featurization_settings: Featurization inputs needed for AutoML job. 
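Sweep settings pair a sampling algorithm with an optional early-termination policy; a sketch, assuming the MedianStoppingPolicy model referenced earlier in this file remains available.

from azure.mgmt.machinelearningservices import models

sweep = models.NlpSweepSettings(
    sampling_algorithm="Random",  # "Grid" and "Bayesian" are the other known values
    early_termination=models.MedianStoppingPolicy(evaluation_interval=1, delay_evaluation=2),
)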
+ :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ - _validation = { - "job_output_type": {"required": True}, - } - _attribute_map = { - "description": {"key": "description", "type": "str"}, - "job_output_type": {"key": "jobOutputType", "type": "str"}, - "mode": {"key": "mode", "type": "str"}, - "uri": {"key": "uri", "type": "str"}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, } def __init__( self, *, - description: Optional[str] = None, - mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, - uri: Optional[str] = None, + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, **kwargs: Any ) -> None: """ - :keyword description: Description for the output. - :paramtype description: str - :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :keyword uri: Output Asset URI. - :paramtype uri: str + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. 
+ :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ - super().__init__(mode=mode, uri=uri, description=description, **kwargs) - self.description = description - self.job_output_type: str = "mlflow_model" - self.mode = mode - self.uri = uri - + super().__init__(**kwargs) + self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters + self.limit_settings = limit_settings + self.search_space = search_space + self.sweep_settings = sweep_settings + self.validation_data = validation_data -class MLTableData(DataVersionBaseProperties): - """MLTable data definition. - All required parameters must be populated in order to send to Azure. +class NlpVerticalFeaturizationSettings(FeaturizationSettings): + """NlpVerticalFeaturizationSettings. - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_anonymous: If the name version are system generated (anonymous registration). - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", - "uri_folder", and "mltable". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. Required. - :vartype data_uri: str - :ivar referenced_uris: Uris referenced in the MLTable definition (required for lineage). - :vartype referenced_uris: list[str] + :ivar dataset_language: Dataset language, useful for the text data. + :vartype dataset_language: str """ - _validation = { - "data_type": {"required": True}, - "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + _attribute_map = { + "dataset_language": {"key": "datasetLanguage", "type": "str"}, } + def __init__(self, *, dataset_language: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword dataset_language: Dataset language, useful for the text data. + :paramtype dataset_language: str + """ + super().__init__(dataset_language=dataset_language, **kwargs) + + +class NlpVerticalLimitSettings(_serialization.Model): + """Job execution constraints. + + :ivar max_concurrent_trials: Maximum Concurrent AutoML iterations. + :vartype max_concurrent_trials: int + :ivar max_nodes: Maximum nodes to use for the experiment. + :vartype max_nodes: int + :ivar max_trials: Number of AutoML iterations. + :vartype max_trials: int + :ivar timeout: AutoML job timeout. + :vartype timeout: ~datetime.timedelta + :ivar trial_timeout: Timeout for individual HD trials. 
+ :vartype trial_timeout: ~datetime.timedelta + """ + _attribute_map = { - "description": {"key": "description", "type": "str"}, - "properties": {"key": "properties", "type": "{str}"}, - "tags": {"key": "tags", "type": "{str}"}, - "is_anonymous": {"key": "isAnonymous", "type": "bool"}, - "is_archived": {"key": "isArchived", "type": "bool"}, - "data_type": {"key": "dataType", "type": "str"}, - "data_uri": {"key": "dataUri", "type": "str"}, - "referenced_uris": {"key": "referencedUris", "type": "[str]"}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_nodes": {"key": "maxNodes", "type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + "trial_timeout": {"key": "trialTimeout", "type": "duration"}, } def __init__( self, *, - data_uri: str, - description: Optional[str] = None, - properties: Optional[Dict[str, str]] = None, - tags: Optional[Dict[str, str]] = None, - is_anonymous: bool = False, - is_archived: bool = False, - referenced_uris: Optional[List[str]] = None, + max_concurrent_trials: int = 1, + max_nodes: int = 1, + max_trials: int = 1, + timeout: datetime.timedelta = "P7D", + trial_timeout: Optional[datetime.timedelta] = None, **kwargs: Any ) -> None: """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_anonymous: If the name version are system generated (anonymous registration). - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword data_uri: [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. Required. - :paramtype data_uri: str - :keyword referenced_uris: Uris referenced in the MLTable definition (required for lineage). - :paramtype referenced_uris: list[str] + :keyword max_concurrent_trials: Maximum Concurrent AutoML iterations. + :paramtype max_concurrent_trials: int + :keyword max_nodes: Maximum nodes to use for the experiment. + :paramtype max_nodes: int + :keyword max_trials: Number of AutoML iterations. + :paramtype max_trials: int + :keyword timeout: AutoML job timeout. + :paramtype timeout: ~datetime.timedelta + :keyword trial_timeout: Timeout for individual HD trials. + :paramtype trial_timeout: ~datetime.timedelta """ - super().__init__( - description=description, - properties=properties, - tags=tags, - is_anonymous=is_anonymous, - is_archived=is_archived, - data_uri=data_uri, - **kwargs - ) - self.data_type: str = "mltable" - self.referenced_uris = referenced_uris + super().__init__(**kwargs) + self.max_concurrent_trials = max_concurrent_trials + self.max_nodes = max_nodes + self.max_trials = max_trials + self.timeout = timeout + self.trial_timeout = trial_timeout + + +class NodeStateCounts(_serialization.Model): + """Counts of various compute node states on the amlCompute. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar idle_node_count: Number of compute nodes in idle state. + :vartype idle_node_count: int + :ivar running_node_count: Number of compute nodes which are running jobs. + :vartype running_node_count: int + :ivar preparing_node_count: Number of compute nodes which are being prepared. 
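A sketch of the NLP featurization and limit settings; the durations are Python timedeltas, serialized as ISO 8601 durations such as the "P7D" default shown above.

import datetime

from azure.mgmt.machinelearningservices import models

featurization = models.NlpVerticalFeaturizationSettings(dataset_language="eng")  # placeholder language code
limits = models.NlpVerticalLimitSettings(
    max_trials=8,
    max_concurrent_trials=2,
    max_nodes=2,
    timeout=datetime.timedelta(hours=6),
    trial_timeout=datetime.timedelta(minutes=30),
)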
+ :vartype preparing_node_count: int + :ivar unusable_node_count: Number of compute nodes which are in unusable state. + :vartype unusable_node_count: int + :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. + :vartype leaving_node_count: int + :ivar preempted_node_count: Number of compute nodes which are in preempted state. + :vartype preempted_node_count: int + """ + + _validation = { + "idle_node_count": {"readonly": True}, + "running_node_count": {"readonly": True}, + "preparing_node_count": {"readonly": True}, + "unusable_node_count": {"readonly": True}, + "leaving_node_count": {"readonly": True}, + "preempted_node_count": {"readonly": True}, + } + + _attribute_map = { + "idle_node_count": {"key": "idleNodeCount", "type": "int"}, + "running_node_count": {"key": "runningNodeCount", "type": "int"}, + "preparing_node_count": {"key": "preparingNodeCount", "type": "int"}, + "unusable_node_count": {"key": "unusableNodeCount", "type": "int"}, + "leaving_node_count": {"key": "leavingNodeCount", "type": "int"}, + "preempted_node_count": {"key": "preemptedNodeCount", "type": "int"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.idle_node_count = None + self.running_node_count = None + self.preparing_node_count = None + self.unusable_node_count = None + self.leaving_node_count = None + self.preempted_node_count = None -class MLTableJobInput(AssetJobInput, JobInput): - """MLTableJobInput. +class NoneAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """NoneAuthTypeWorkspaceConnectionProperties. All required parameters must be populated in order to send to Azure. - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and - "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", - "Download", "Direct", "EvalMount", and "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: [Required] Input Asset URI. Required. - :vartype uri: str + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. 
+ :vartype metadata: JSON + :ivar target: + :vartype target: str """ _validation = { - "job_input_type": {"required": True}, - "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "auth_type": {"required": True}, } _attribute_map = { - "description": {"key": "description", "type": "str"}, - "job_input_type": {"key": "jobInputType", "type": "str"}, - "mode": {"key": "mode", "type": "str"}, - "uri": {"key": "uri", "type": "str"}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, } def __init__( self, *, - uri: str, - description: Optional[str] = None, - mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, + target: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword description: Description for the input. - :paramtype description: str - :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", - "Download", "Direct", "EvalMount", and "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: [Required] Input Asset URI. Required. - :paramtype uri: str + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. + :paramtype metadata: JSON + :keyword target: + :paramtype target: str """ - super().__init__(mode=mode, uri=uri, description=description, **kwargs) - self.description = description - self.job_input_type: str = "mltable" - self.mode = mode - self.uri = uri + super().__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) + self.auth_type: str = "None" -class MLTableJobOutput(AssetJobOutput, JobOutput): - """MLTableJobOutput. +class NoneDatastoreCredentials(DatastoreCredentials): + """Empty/none datastore credentials. All required parameters must be populated in order to send to Azure. - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: - "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :ivar uri: Output Asset URI. - :vartype uri: str + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". 
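A sketch of a credential-less workspace connection; the constructor pins auth_type to "None", and the target URL is a placeholder.

from azure.mgmt.machinelearningservices import models

props = models.NoneAuthTypeWorkspaceConnectionProperties(
    category="ContainerRegistry",  # one of the known connection categories
    target="https://myregistry.azurecr.io",  # placeholder target
)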
+ :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType """ _validation = { - "job_output_type": {"required": True}, + "credentials_type": {"required": True}, } _attribute_map = { - "description": {"key": "description", "type": "str"}, - "job_output_type": {"key": "jobOutputType", "type": "str"}, - "mode": {"key": "mode", "type": "str"}, - "uri": {"key": "uri", "type": "str"}, + "credentials_type": {"key": "credentialsType", "type": "str"}, } - def __init__( - self, - *, - description: Optional[str] = None, - mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, - uri: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword description: Description for the output. - :paramtype description: str - :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :keyword uri: Output Asset URI. - :paramtype uri: str - """ - super().__init__(mode=mode, uri=uri, description=description, **kwargs) - self.description = description - self.job_output_type: str = "mltable" - self.mode = mode - self.uri = uri + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.credentials_type: str = "None" -class ModelContainer(Resource): - """Azure Resource Manager resource envelope. +class NotebookAccessTokenResult(_serialization.Model): + """NotebookAccessTokenResult. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: [Required] Additional attributes of the entity. Required. 
- :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties + :ivar access_token: + :vartype access_token: str + :ivar expires_in: + :vartype expires_in: int + :ivar host_name: + :vartype host_name: str + :ivar notebook_resource_id: + :vartype notebook_resource_id: str + :ivar public_dns: + :vartype public_dns: str + :ivar refresh_token: + :vartype refresh_token: str + :ivar scope: + :vartype scope: str + :ivar token_type: + :vartype token_type: str """ _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - "properties": {"required": True}, + "access_token": {"readonly": True}, + "expires_in": {"readonly": True}, + "host_name": {"readonly": True}, + "notebook_resource_id": {"readonly": True}, + "public_dns": {"readonly": True}, + "refresh_token": {"readonly": True}, + "scope": {"readonly": True}, + "token_type": {"readonly": True}, } _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - "properties": {"key": "properties", "type": "ModelContainerProperties"}, + "access_token": {"key": "accessToken", "type": "str"}, + "expires_in": {"key": "expiresIn", "type": "int"}, + "host_name": {"key": "hostName", "type": "str"}, + "notebook_resource_id": {"key": "notebookResourceId", "type": "str"}, + "public_dns": {"key": "publicDns", "type": "str"}, + "refresh_token": {"key": "refreshToken", "type": "str"}, + "scope": {"key": "scope", "type": "str"}, + "token_type": {"key": "tokenType", "type": "str"}, } - def __init__(self, *, properties: "_models.ModelContainerProperties", **kwargs: Any) -> None: - """ - :keyword properties: [Required] Additional attributes of the entity. Required. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties - """ + def __init__(self, **kwargs: Any) -> None: + """ """ super().__init__(**kwargs) - self.properties = properties - + self.access_token = None + self.expires_in = None + self.host_name = None + self.notebook_resource_id = None + self.public_dns = None + self.refresh_token = None + self.scope = None + self.token_type = None -class ModelContainerProperties(AssetContainer): - """ModelContainerProperties. - Variables are only populated by the server, and will be ignored when sending a request. +class NotebookPreparationError(_serialization.Model): + """NotebookPreparationError. - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. - :vartype next_version: str - :ivar provisioning_state: Provisioning state for the model container. Known values are: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
- :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState + :ivar error_message: + :vartype error_message: str + :ivar status_code: + :vartype status_code: int """ - _validation = { - "latest_version": {"readonly": True}, - "next_version": {"readonly": True}, - "provisioning_state": {"readonly": True}, + _attribute_map = { + "error_message": {"key": "errorMessage", "type": "str"}, + "status_code": {"key": "statusCode", "type": "int"}, } + def __init__( + self, *, error_message: Optional[str] = None, status_code: Optional[int] = None, **kwargs: Any + ) -> None: + """ + :keyword error_message: + :paramtype error_message: str + :keyword status_code: + :paramtype status_code: int + """ + super().__init__(**kwargs) + self.error_message = error_message + self.status_code = status_code + + +class NotebookResourceInfo(_serialization.Model): + """NotebookResourceInfo. + + :ivar fqdn: + :vartype fqdn: str + :ivar is_private_link_enabled: + :vartype is_private_link_enabled: bool + :ivar notebook_preparation_error: The error that occurs when preparing notebook. + :vartype notebook_preparation_error: + ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + :ivar resource_id: the data plane resourceId that used to initialize notebook component. + :vartype resource_id: str + """ + _attribute_map = { - "description": {"key": "description", "type": "str"}, - "properties": {"key": "properties", "type": "{str}"}, - "tags": {"key": "tags", "type": "{str}"}, - "is_archived": {"key": "isArchived", "type": "bool"}, - "latest_version": {"key": "latestVersion", "type": "str"}, - "next_version": {"key": "nextVersion", "type": "str"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, + "fqdn": {"key": "fqdn", "type": "str"}, + "is_private_link_enabled": {"key": "isPrivateLinkEnabled", "type": "bool"}, + "notebook_preparation_error": {"key": "notebookPreparationError", "type": "NotebookPreparationError"}, + "resource_id": {"key": "resourceId", "type": "str"}, } def __init__( self, *, - description: Optional[str] = None, - properties: Optional[Dict[str, str]] = None, - tags: Optional[Dict[str, str]] = None, - is_archived: bool = False, + fqdn: Optional[str] = None, + is_private_link_enabled: Optional[bool] = None, + notebook_preparation_error: Optional["_models.NotebookPreparationError"] = None, + resource_id: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool + :keyword fqdn: + :paramtype fqdn: str + :keyword is_private_link_enabled: + :paramtype is_private_link_enabled: bool + :keyword notebook_preparation_error: The error that occurs when preparing notebook. + :paramtype notebook_preparation_error: + ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + :keyword resource_id: the data plane resourceId that used to initialize notebook component. 
+ :paramtype resource_id: str """ - super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) - self.provisioning_state = None + super().__init__(**kwargs) + self.fqdn = fqdn + self.is_private_link_enabled = is_private_link_enabled + self.notebook_preparation_error = notebook_preparation_error + self.resource_id = resource_id -class ModelContainerResourceArmPaginatedResult(_serialization.Model): - """A paginated list of ModelContainer entities. +class NotificationSetting(_serialization.Model): + """Configuration for notification. - :ivar next_link: The link to the next page of ModelContainer objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type ModelContainer. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] + :ivar email_on: Send email notification to user on specified notification type. + :vartype email_on: list[str or + ~azure.mgmt.machinelearningservices.models.EmailNotificationEnableType] + :ivar emails: This is the email recipient list which has a limitation of 499 characters in + total concat with comma separator. + :vartype emails: list[str] + :ivar webhooks: Send webhook callback to a service. Key is a user-provided name for the + webhook. + :vartype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] """ _attribute_map = { - "next_link": {"key": "nextLink", "type": "str"}, - "value": {"key": "value", "type": "[ModelContainer]"}, + "email_on": {"key": "emailOn", "type": "[str]"}, + "emails": {"key": "emails", "type": "[str]"}, + "webhooks": {"key": "webhooks", "type": "{Webhook}"}, } def __init__( - self, *, next_link: Optional[str] = None, value: Optional[List["_models.ModelContainer"]] = None, **kwargs: Any + self, + *, + email_on: Optional[List[Union[str, "_models.EmailNotificationEnableType"]]] = None, + emails: Optional[List[str]] = None, + webhooks: Optional[Dict[str, "_models.Webhook"]] = None, + **kwargs: Any ) -> None: """ - :keyword next_link: The link to the next page of ModelContainer objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type ModelContainer. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] + :keyword email_on: Send email notification to user on specified notification type. + :paramtype email_on: list[str or + ~azure.mgmt.machinelearningservices.models.EmailNotificationEnableType] + :keyword emails: This is the email recipient list which has a limitation of 499 characters in + total concat with comma separator. + :paramtype emails: list[str] + :keyword webhooks: Send webhook callback to a service. Key is a user-provided name for the + webhook. + :paramtype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] """ super().__init__(**kwargs) - self.next_link = next_link - self.value = value - + self.email_on = email_on + self.emails = emails + self.webhooks = webhooks -class ModelVersion(Resource): - """Azure Resource Manager resource envelope. - Variables are only populated by the server, and will be ignored when sending a request. +class NumericalDataDriftMetricThreshold(DataDriftMetricThresholdBase): + """NumericalDataDriftMetricThreshold. All required parameters must be populated in order to send to Azure. - :ivar id: Fully qualified resource ID for the resource. 
Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: [Required] Additional attributes of the entity. Required. - :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :ivar metric: [Required] The numerical data drift metric to calculate. Required. Known values + are: "JensenShannonDistance", "PopulationStabilityIndex", "NormalizedWassersteinDistance", and + "TwoSampleKolmogorovSmirnovTest". + :vartype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataDriftMetric """ _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - "properties": {"required": True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - "properties": {"key": "properties", "type": "ModelVersionProperties"}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } - def __init__(self, *, properties: "_models.ModelVersionProperties", **kwargs: Any) -> None: + def __init__( + self, + *, + metric: Union[str, "_models.NumericalDataDriftMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ - :keyword properties: [Required] Additional attributes of the entity. Required. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :keyword metric: [Required] The numerical data drift metric to calculate. Required. Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", + "NormalizedWassersteinDistance", and "TwoSampleKolmogorovSmirnovTest". + :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataDriftMetric """ - super().__init__(**kwargs) - self.properties = properties + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Numerical" + self.metric = metric -class ModelVersionProperties(AssetBase): # pylint: disable=too-many-instance-attributes - """Model asset version details. 
+class NumericalDataQualityMetricThreshold(DataQualityMetricThresholdBase): + """NumericalDataQualityMetricThreshold. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_anonymous: If the name version are system generated (anonymous registration). - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar flavors: Mapping of model flavors to their properties. - :vartype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] - :ivar job_name: Name of the training job which produced this model. - :vartype job_name: str - :ivar model_type: The storage format for this entity. Used for NCD. - :vartype model_type: str - :ivar model_uri: The URI path to the model contents. - :vartype model_uri: str - :ivar provisioning_state: Provisioning state for the model version. Known values are: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - :ivar stage: Stage in the model lifecycle assigned to this model. - :vartype stage: str + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :ivar metric: [Required] The numerical data quality metric to calculate. Required. Known values + are: "NullValueRate", "DataTypeErrorRate", and "OutOfBoundsRate". 
+ :vartype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataQualityMetric """ _validation = { - "provisioning_state": {"readonly": True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - "description": {"key": "description", "type": "str"}, - "properties": {"key": "properties", "type": "{str}"}, - "tags": {"key": "tags", "type": "{str}"}, - "is_anonymous": {"key": "isAnonymous", "type": "bool"}, - "is_archived": {"key": "isArchived", "type": "bool"}, - "flavors": {"key": "flavors", "type": "{FlavorData}"}, - "job_name": {"key": "jobName", "type": "str"}, - "model_type": {"key": "modelType", "type": "str"}, - "model_uri": {"key": "modelUri", "type": "str"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, - "stage": {"key": "stage", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( self, *, - description: Optional[str] = None, - properties: Optional[Dict[str, str]] = None, - tags: Optional[Dict[str, str]] = None, - is_anonymous: bool = False, - is_archived: bool = False, - flavors: Optional[Dict[str, "_models.FlavorData"]] = None, - job_name: Optional[str] = None, - model_type: Optional[str] = None, - model_uri: Optional[str] = None, - stage: Optional[str] = None, + metric: Union[str, "_models.NumericalDataQualityMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any ) -> None: """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_anonymous: If the name version are system generated (anonymous registration). - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword flavors: Mapping of model flavors to their properties. - :paramtype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] - :keyword job_name: Name of the training job which produced this model. - :paramtype job_name: str - :keyword model_type: The storage format for this entity. Used for NCD. - :paramtype model_type: str - :keyword model_uri: The URI path to the model contents. - :paramtype model_uri: str - :keyword stage: Stage in the model lifecycle assigned to this model. - :paramtype stage: str + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :keyword metric: [Required] The numerical data quality metric to calculate. Required. Known + values are: "NullValueRate", "DataTypeErrorRate", and "OutOfBoundsRate". 
+ :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataQualityMetric """ - super().__init__( - description=description, - properties=properties, - tags=tags, - is_anonymous=is_anonymous, - is_archived=is_archived, - **kwargs - ) - self.flavors = flavors - self.job_name = job_name - self.model_type = model_type - self.model_uri = model_uri - self.provisioning_state = None - self.stage = stage + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Numerical" + self.metric = metric -class ModelVersionResourceArmPaginatedResult(_serialization.Model): - """A paginated list of ModelVersion entities. +class NumericalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBase): + """NumericalPredictionDriftMetricThreshold. - :ivar next_link: The link to the next page of ModelVersion objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type ModelVersion. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] + All required parameters must be populated in order to send to Azure. + + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :ivar metric: [Required] The numerical prediction drift metric to calculate. Required. Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", + "NormalizedWassersteinDistance", and "TwoSampleKolmogorovSmirnovTest". + :vartype metric: str or + ~azure.mgmt.machinelearningservices.models.NumericalPredictionDriftMetric """ + _validation = { + "data_type": {"required": True}, + "metric": {"required": True}, + } + _attribute_map = { - "next_link": {"key": "nextLink", "type": "str"}, - "value": {"key": "value", "type": "[ModelVersion]"}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( - self, *, next_link: Optional[str] = None, value: Optional[List["_models.ModelVersion"]] = None, **kwargs: Any + self, + *, + metric: Union[str, "_models.NumericalPredictionDriftMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any ) -> None: """ - :keyword next_link: The link to the next page of ModelVersion objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type ModelVersion. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :keyword metric: [Required] The numerical prediction drift metric to calculate. Required. Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", + "NormalizedWassersteinDistance", and "TwoSampleKolmogorovSmirnovTest". 
+ :paramtype metric: str or + ~azure.mgmt.machinelearningservices.models.NumericalPredictionDriftMetric """ - super().__init__(**kwargs) - self.next_link = next_link - self.value = value + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Numerical" + self.metric = metric -class Mpi(DistributionConfiguration): - """MPI distribution configuration. +class Objective(_serialization.Model): + """Optimization objective. All required parameters must be populated in order to send to Azure. - :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. - Known values are: "PyTorch", "TensorFlow", and "Mpi". - :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType - :ivar process_count_per_instance: Number of processes per MPI node. - :vartype process_count_per_instance: int + :ivar goal: [Required] Defines supported metric goals for hyperparameter tuning. Required. + Known values are: "Minimize" and "Maximize". + :vartype goal: str or ~azure.mgmt.machinelearningservices.models.Goal + :ivar primary_metric: [Required] Name of the metric to optimize. Required. + :vartype primary_metric: str """ _validation = { - "distribution_type": {"required": True}, + "goal": {"required": True}, + "primary_metric": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - "distribution_type": {"key": "distributionType", "type": "str"}, - "process_count_per_instance": {"key": "processCountPerInstance", "type": "int"}, + "goal": {"key": "goal", "type": "str"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - def __init__(self, *, process_count_per_instance: Optional[int] = None, **kwargs: Any) -> None: + def __init__(self, *, goal: Union[str, "_models.Goal"], primary_metric: str, **kwargs: Any) -> None: """ - :keyword process_count_per_instance: Number of processes per MPI node. - :paramtype process_count_per_instance: int + :keyword goal: [Required] Defines supported metric goals for hyperparameter tuning. Required. + Known values are: "Minimize" and "Maximize". + :paramtype goal: str or ~azure.mgmt.machinelearningservices.models.Goal + :keyword primary_metric: [Required] Name of the metric to optimize. Required. + :paramtype primary_metric: str """ super().__init__(**kwargs) - self.distribution_type: str = "Mpi" - self.process_count_per_instance = process_count_per_instance + self.goal = goal + self.primary_metric = primary_metric -class NlpVertical(_serialization.Model): - """Abstract class for NLP related AutoML tasks. - NLP - Natural Language Processing. +class OneLakeDatastore(DatastoreProperties): # pylint: disable=too-many-instance-attributes + """OneLake (Trident) datastore configuration. - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. 
+ :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: [Required] Account credentials. Required. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar intellectual_property: Intellectual Property details. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar artifact: [Required] OneLake artifact backing the datastore. Required. + :vartype artifact: ~azure.mgmt.machinelearningservices.models.OneLakeArtifact + :ivar endpoint: OneLake endpoint to use for the datastore. + :vartype endpoint: str + :ivar one_lake_workspace_name: [Required] OneLake workspace name. Required. + :vartype one_lake_workspace_name: str + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ + _validation = { + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "artifact": {"required": True}, + "one_lake_workspace_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + _attribute_map = { - "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, - "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, - "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "artifact": {"key": "artifact", "type": "OneLakeArtifact"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "one_lake_workspace_name": {"key": "oneLakeWorkspaceName", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, } def __init__( self, *, - featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, - limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, - validation_data: Optional["_models.MLTableJobInput"] = None, + credentials: "_models.DatastoreCredentials", + artifact: "_models.OneLakeArtifact", + one_lake_workspace_name: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + endpoint: Optional[str] = None, + service_data_access_auth_identity: Optional[Union[str, 
"_models.ServiceDataAccessAuthIdentity"]] = None, **kwargs: Any ) -> None: """ - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :keyword limit_settings: Execution constraints for AutoMLJob. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: [Required] Account credentials. Required. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword intellectual_property: Intellectual Property details. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword artifact: [Required] OneLake artifact backing the datastore. Required. + :paramtype artifact: ~azure.mgmt.machinelearningservices.models.OneLakeArtifact + :keyword endpoint: OneLake endpoint to use for the datastore. + :paramtype endpoint: str + :keyword one_lake_workspace_name: [Required] OneLake workspace name. Required. + :paramtype one_lake_workspace_name: str + :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". + :paramtype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ - super().__init__(**kwargs) - self.featurization_settings = featurization_settings - self.limit_settings = limit_settings - self.validation_data = validation_data + super().__init__( + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) + self.datastore_type: str = "OneLake" + self.artifact = artifact + self.endpoint = endpoint + self.one_lake_workspace_name = one_lake_workspace_name + self.service_data_access_auth_identity = service_data_access_auth_identity -class NlpVerticalFeaturizationSettings(FeaturizationSettings): - """NlpVerticalFeaturizationSettings. +class OnlineDeployment(TrackedResource): + """OnlineDeployment. - :ivar dataset_language: Dataset language, useful for the text data. - :vartype dataset_language: str + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. 
+ :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku """ + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, + } + _attribute_map = { - "dataset_language": {"key": "datasetLanguage", "type": "str"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "OnlineDeploymentProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - def __init__(self, *, dataset_language: Optional[str] = None, **kwargs: Any) -> None: + def __init__( + self, + *, + location: str, + properties: "_models.OnlineDeploymentProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: """ - :keyword dataset_language: Dataset language, useful for the text data. - :paramtype dataset_language: str + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ - super().__init__(dataset_language=dataset_language, **kwargs) + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku -class NlpVerticalLimitSettings(_serialization.Model): - """Job execution constraints. +class OnlineDeploymentTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of OnlineDeployment entities. - :ivar max_concurrent_trials: Maximum Concurrent AutoML iterations. 
- :vartype max_concurrent_trials: int - :ivar max_trials: Number of AutoML iterations. - :vartype max_trials: int - :ivar timeout: AutoML job timeout. - :vartype timeout: ~datetime.timedelta + :ivar next_link: The link to the next page of OnlineDeployment objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type OnlineDeployment. + :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] """ _attribute_map = { - "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, - "max_trials": {"key": "maxTrials", "type": "int"}, - "timeout": {"key": "timeout", "type": "duration"}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OnlineDeployment]"}, } def __init__( - self, *, max_concurrent_trials: int = 1, max_trials: int = 1, timeout: datetime.timedelta = "P7D", **kwargs: Any + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.OnlineDeployment"]] = None, + **kwargs: Any ) -> None: """ - :keyword max_concurrent_trials: Maximum Concurrent AutoML iterations. - :paramtype max_concurrent_trials: int - :keyword max_trials: Number of AutoML iterations. - :paramtype max_trials: int - :keyword timeout: AutoML job timeout. - :paramtype timeout: ~datetime.timedelta + :keyword next_link: The link to the next page of OnlineDeployment objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type OnlineDeployment. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] """ super().__init__(**kwargs) - self.max_concurrent_trials = max_concurrent_trials - self.max_trials = max_trials - self.timeout = timeout + self.next_link = next_link + self.value = value -class NodeStateCounts(_serialization.Model): - """Counts of various compute node states on the amlCompute. +class OnlineEndpoint(TrackedResource): + """OnlineEndpoint. Variables are only populated by the server, and will be ignored when sending a request. - :ivar idle_node_count: Number of compute nodes in idle state. - :vartype idle_node_count: int - :ivar running_node_count: Number of compute nodes which are running jobs. - :vartype running_node_count: int - :ivar preparing_node_count: Number of compute nodes which are being prepared. - :vartype preparing_node_count: int - :ivar unusable_node_count: Number of compute nodes which are in unusable state. - :vartype unusable_node_count: int - :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. - :vartype leaving_node_count: int - :ivar preempted_node_count: Number of compute nodes which are in preempted state. - :vartype preempted_node_count: int + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. 
+ :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku """ _validation = { - "idle_node_count": {"readonly": True}, - "running_node_count": {"readonly": True}, - "preparing_node_count": {"readonly": True}, - "unusable_node_count": {"readonly": True}, - "leaving_node_count": {"readonly": True}, - "preempted_node_count": {"readonly": True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "OnlineEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } - _attribute_map = { - "idle_node_count": {"key": "idleNodeCount", "type": "int"}, - "running_node_count": {"key": "runningNodeCount", "type": "int"}, - "preparing_node_count": {"key": "preparingNodeCount", "type": "int"}, - "unusable_node_count": {"key": "unusableNodeCount", "type": "int"}, - "leaving_node_count": {"key": "leavingNodeCount", "type": "int"}, - "preempted_node_count": {"key": "preemptedNodeCount", "type": "int"}, - } + def __init__( + self, + *, + location: str, + properties: "_models.OnlineEndpointProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. 
+ :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.idle_node_count = None - self.running_node_count = None - self.preparing_node_count = None - self.unusable_node_count = None - self.leaving_node_count = None - self.preempted_node_count = None +class OnlineEndpointProperties(EndpointPropertiesBase): # pylint: disable=too-many-instance-attributes + """Online endpoint configuration. -class NoneAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """NoneAuthTypeWorkspaceConnectionProperties. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar auth_type: Authentication type of the connection target. Required. Known values are: - "PAT", "ManagedIdentity", "UsernamePassword", "None", and "SAS". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar target: - :vartype target: str - :ivar value: Value details of the workspace connection. - :vartype value: str - :ivar value_format: format for the workspace connection value. "JSON" - :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". + :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :ivar description: Description of the inference endpoint. + :vartype description: str + :ivar keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar scoring_uri: Endpoint URI for the inference endpoint. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + :ivar compute: ARM resource ID of the compute if it exists. + optional. + :vartype compute: str + :ivar mirror_traffic: Percentage of traffic to be mirrored to each deployment without using + returned scoring. Traffic values need to sum to utmost 50. + :vartype mirror_traffic: dict[str, int] + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState + :ivar public_network_access: Set to "Enabled" for endpoints that should allow public access + when Private Link is enabled. Known values are: "Enabled" and "Disabled". 
+ :vartype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :ivar traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic values + need to sum to 100. + :vartype traffic: dict[str, int] """ _validation = { - "auth_type": {"required": True}, + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - "auth_type": {"key": "authType", "type": "str"}, - "category": {"key": "category", "type": "str"}, - "target": {"key": "target", "type": "str"}, - "value": {"key": "value", "type": "str"}, - "value_format": {"key": "valueFormat", "type": "str"}, + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": "{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, + "compute": {"key": "compute", "type": "str"}, + "mirror_traffic": {"key": "mirrorTraffic", "type": "{int}"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, + "traffic": {"key": "traffic", "type": "{int}"}, } def __init__( self, *, - category: Optional[Union[str, "_models.ConnectionCategory"]] = None, - target: Optional[str] = None, - value: Optional[str] = None, - value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + auth_mode: Union[str, "_models.EndpointAuthMode"], + description: Optional[str] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, + properties: Optional[Dict[str, str]] = None, + compute: Optional[str] = None, + mirror_traffic: Optional[Dict[str, int]] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccessType"]] = None, + traffic: Optional[Dict[str, int]] = None, **kwargs: Any ) -> None: """ - :keyword category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword target: - :paramtype target: str - :keyword value: Value details of the workspace connection. - :paramtype value: str - :keyword value_format: format for the workspace connection value. "JSON" - :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". + :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :keyword description: Description of the inference endpoint. + :paramtype description: str + :keyword keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword compute: ARM resource ID of the compute if it exists. + optional. 
+ :paramtype compute: str + :keyword mirror_traffic: Percentage of traffic to be mirrored to each deployment without using + returned scoring. Traffic values need to sum to utmost 50. + :paramtype mirror_traffic: dict[str, int] + :keyword public_network_access: Set to "Enabled" for endpoints that should allow public access + when Private Link is enabled. Known values are: "Enabled" and "Disabled". + :paramtype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :keyword traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic + values need to sum to 100. + :paramtype traffic: dict[str, int] """ - super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) - self.auth_type: str = "None" - + super().__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) + self.compute = compute + self.mirror_traffic = mirror_traffic + self.provisioning_state = None + self.public_network_access = public_network_access + self.traffic = traffic -class NoneDatastoreCredentials(DatastoreCredentials): - """Empty/none datastore credentials. - All required parameters must be populated in order to send to Azure. +class OnlineEndpointTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of OnlineEndpoint entities. - :ivar credentials_type: [Required] Credential type used to authentication with storage. - Required. Known values are: "AccountKey", "Certificate", "None", "Sas", and "ServicePrincipal". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar next_link: The link to the next page of OnlineEndpoint objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type OnlineEndpoint. + :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] """ - _validation = { - "credentials_type": {"required": True}, - } - _attribute_map = { - "credentials_type": {"key": "credentialsType", "type": "str"}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OnlineEndpoint]"}, } - def __init__(self, **kwargs: Any) -> None: - """ """ + def __init__( + self, *, next_link: Optional[str] = None, value: Optional[List["_models.OnlineEndpoint"]] = None, **kwargs: Any + ) -> None: + """ + :keyword next_link: The link to the next page of OnlineEndpoint objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type OnlineEndpoint. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + """ super().__init__(**kwargs) - self.credentials_type: str = "None" - + self.next_link = next_link + self.value = value -class NotebookAccessTokenResult(_serialization.Model): - """NotebookAccessTokenResult. - Variables are only populated by the server, and will be ignored when sending a request. +class OnlineInferenceConfiguration(_serialization.Model): + """Online inference configuration options. 
- :ivar notebook_resource_id: - :vartype notebook_resource_id: str - :ivar host_name: - :vartype host_name: str - :ivar public_dns: - :vartype public_dns: str - :ivar access_token: - :vartype access_token: str - :ivar token_type: - :vartype token_type: str - :ivar expires_in: - :vartype expires_in: int - :ivar refresh_token: - :vartype refresh_token: str - :ivar scope: - :vartype scope: str + :ivar configurations: Additional configurations. + :vartype configurations: dict[str, str] + :ivar entry_script: Entry script or command to invoke. + :vartype entry_script: str + :ivar liveness_route: The route to check the liveness of the inference server container. + :vartype liveness_route: ~azure.mgmt.machinelearningservices.models.Route + :ivar readiness_route: The route to check the readiness of the inference server container. + :vartype readiness_route: ~azure.mgmt.machinelearningservices.models.Route + :ivar scoring_route: The port to send the scoring requests to, within the inference server + container. + :vartype scoring_route: ~azure.mgmt.machinelearningservices.models.Route """ - _validation = { - "notebook_resource_id": {"readonly": True}, - "host_name": {"readonly": True}, - "public_dns": {"readonly": True}, - "access_token": {"readonly": True}, - "token_type": {"readonly": True}, - "expires_in": {"readonly": True}, - "refresh_token": {"readonly": True}, - "scope": {"readonly": True}, - } - _attribute_map = { - "notebook_resource_id": {"key": "notebookResourceId", "type": "str"}, - "host_name": {"key": "hostName", "type": "str"}, - "public_dns": {"key": "publicDns", "type": "str"}, - "access_token": {"key": "accessToken", "type": "str"}, - "token_type": {"key": "tokenType", "type": "str"}, - "expires_in": {"key": "expiresIn", "type": "int"}, - "refresh_token": {"key": "refreshToken", "type": "str"}, - "scope": {"key": "scope", "type": "str"}, + "configurations": {"key": "configurations", "type": "{str}"}, + "entry_script": {"key": "entryScript", "type": "str"}, + "liveness_route": {"key": "livenessRoute", "type": "Route"}, + "readiness_route": {"key": "readinessRoute", "type": "Route"}, + "scoring_route": {"key": "scoringRoute", "type": "Route"}, } - def __init__(self, **kwargs: Any) -> None: - """ """ + def __init__( + self, + *, + configurations: Optional[Dict[str, str]] = None, + entry_script: Optional[str] = None, + liveness_route: Optional["_models.Route"] = None, + readiness_route: Optional["_models.Route"] = None, + scoring_route: Optional["_models.Route"] = None, + **kwargs: Any + ) -> None: + """ + :keyword configurations: Additional configurations. + :paramtype configurations: dict[str, str] + :keyword entry_script: Entry script or command to invoke. + :paramtype entry_script: str + :keyword liveness_route: The route to check the liveness of the inference server container. + :paramtype liveness_route: ~azure.mgmt.machinelearningservices.models.Route + :keyword readiness_route: The route to check the readiness of the inference server container. + :paramtype readiness_route: ~azure.mgmt.machinelearningservices.models.Route + :keyword scoring_route: The port to send the scoring requests to, within the inference server + container. 
+ :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route + """ super().__init__(**kwargs) - self.notebook_resource_id = None - self.host_name = None - self.public_dns = None - self.access_token = None - self.token_type = None - self.expires_in = None - self.refresh_token = None - self.scope = None + self.configurations = configurations + self.entry_script = entry_script + self.liveness_route = liveness_route + self.readiness_route = readiness_route + self.scoring_route = scoring_route -class NotebookPreparationError(_serialization.Model): - """NotebookPreparationError. +class OnlineRequestSettings(_serialization.Model): + """Online deployment scoring requests configuration. - :ivar error_message: - :vartype error_message: str - :ivar status_code: - :vartype status_code: int + :ivar max_concurrent_requests_per_instance: The number of maximum concurrent requests per node + allowed per deployment. Defaults to 1. + :vartype max_concurrent_requests_per_instance: int + :ivar max_queue_wait: The maximum amount of time a request will stay in the queue in ISO 8601 + format. + Defaults to 500ms. + :vartype max_queue_wait: ~datetime.timedelta + :ivar request_timeout: The scoring timeout in ISO 8601 format. + Defaults to 5000ms. + :vartype request_timeout: ~datetime.timedelta """ _attribute_map = { - "error_message": {"key": "errorMessage", "type": "str"}, - "status_code": {"key": "statusCode", "type": "int"}, + "max_concurrent_requests_per_instance": {"key": "maxConcurrentRequestsPerInstance", "type": "int"}, + "max_queue_wait": {"key": "maxQueueWait", "type": "duration"}, + "request_timeout": {"key": "requestTimeout", "type": "duration"}, } def __init__( - self, *, error_message: Optional[str] = None, status_code: Optional[int] = None, **kwargs: Any + self, + *, + max_concurrent_requests_per_instance: int = 1, + max_queue_wait: datetime.timedelta = "PT0.5S", + request_timeout: datetime.timedelta = "PT5S", + **kwargs: Any ) -> None: """ - :keyword error_message: - :paramtype error_message: str - :keyword status_code: - :paramtype status_code: int + :keyword max_concurrent_requests_per_instance: The number of maximum concurrent requests per + node allowed per deployment. Defaults to 1. + :paramtype max_concurrent_requests_per_instance: int + :keyword max_queue_wait: The maximum amount of time a request will stay in the queue in ISO + 8601 format. + Defaults to 500ms. + :paramtype max_queue_wait: ~datetime.timedelta + :keyword request_timeout: The scoring timeout in ISO 8601 format. + Defaults to 5000ms. + :paramtype request_timeout: ~datetime.timedelta """ super().__init__(**kwargs) - self.error_message = error_message - self.status_code = status_code + self.max_concurrent_requests_per_instance = max_concurrent_requests_per_instance + self.max_queue_wait = max_queue_wait + self.request_timeout = request_timeout -class NotebookResourceInfo(_serialization.Model): - """NotebookResourceInfo. +class OperationDisplay(_serialization.Model): + """Display name of operation. - :ivar fqdn: - :vartype fqdn: str - :ivar resource_id: the data plane resourceId that used to initialize notebook component. - :vartype resource_id: str - :ivar notebook_preparation_error: The error that occurs when preparing notebook. - :vartype notebook_preparation_error: - ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + :ivar description: Gets or sets the description for the operation. + :vartype description: str + :ivar operation: Gets or sets the operation that users can perform. 
+ :vartype operation: str + :ivar provider: Gets or sets the resource provider name: + Microsoft.MachineLearningExperimentation. + :vartype provider: str + :ivar resource: Gets or sets the resource on which the operation is performed. + :vartype resource: str """ _attribute_map = { - "fqdn": {"key": "fqdn", "type": "str"}, - "resource_id": {"key": "resourceId", "type": "str"}, - "notebook_preparation_error": {"key": "notebookPreparationError", "type": "NotebookPreparationError"}, + "description": {"key": "description", "type": "str"}, + "operation": {"key": "operation", "type": "str"}, + "provider": {"key": "provider", "type": "str"}, + "resource": {"key": "resource", "type": "str"}, } def __init__( self, *, - fqdn: Optional[str] = None, - resource_id: Optional[str] = None, - notebook_preparation_error: Optional["_models.NotebookPreparationError"] = None, + description: Optional[str] = None, + operation: Optional[str] = None, + provider: Optional[str] = None, + resource: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword fqdn: - :paramtype fqdn: str - :keyword resource_id: the data plane resourceId that used to initialize notebook component. - :paramtype resource_id: str - :keyword notebook_preparation_error: The error that occurs when preparing notebook. - :paramtype notebook_preparation_error: - ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + :keyword description: Gets or sets the description for the operation. + :paramtype description: str + :keyword operation: Gets or sets the operation that users can perform. + :paramtype operation: str + :keyword provider: Gets or sets the resource provider name: + Microsoft.MachineLearningExperimentation. + :paramtype provider: str + :keyword resource: Gets or sets the resource on which the operation is performed. + :paramtype resource: str """ super().__init__(**kwargs) - self.fqdn = fqdn - self.resource_id = resource_id - self.notebook_preparation_error = notebook_preparation_error - + self.description = description + self.operation = operation + self.provider = provider + self.resource = resource -class Objective(_serialization.Model): - """Optimization objective. - All required parameters must be populated in order to send to Azure. +class OsPatchingStatus(_serialization.Model): + """Returns metadata about the os patching. - :ivar goal: [Required] Defines supported metric goals for hyperparameter tuning. Required. - Known values are: "Minimize" and "Maximize". - :vartype goal: str or ~azure.mgmt.machinelearningservices.models.Goal - :ivar primary_metric: [Required] Name of the metric to optimize. Required. - :vartype primary_metric: str + :ivar patch_status: The os patching status. Known values are: "CompletedWithWarnings", + "Failed", "InProgress", "Succeeded", and "Unknown". + :vartype patch_status: str or ~azure.mgmt.machinelearningservices.models.PatchStatus + :ivar latest_patch_time: Time of the latest os patching. + :vartype latest_patch_time: str + :ivar reboot_pending: Specifies whether this compute instance is pending for reboot to finish + os patching. + :vartype reboot_pending: bool + :ivar scheduled_reboot_time: Time of scheduled reboot. 
+ :vartype scheduled_reboot_time: str """ - _validation = { - "goal": {"required": True}, - "primary_metric": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, - } - _attribute_map = { - "goal": {"key": "goal", "type": "str"}, - "primary_metric": {"key": "primaryMetric", "type": "str"}, + "patch_status": {"key": "patchStatus", "type": "str"}, + "latest_patch_time": {"key": "latestPatchTime", "type": "str"}, + "reboot_pending": {"key": "rebootPending", "type": "bool"}, + "scheduled_reboot_time": {"key": "scheduledRebootTime", "type": "str"}, } - def __init__(self, *, goal: Union[str, "_models.Goal"], primary_metric: str, **kwargs: Any) -> None: + def __init__( + self, + *, + patch_status: Optional[Union[str, "_models.PatchStatus"]] = None, + latest_patch_time: Optional[str] = None, + reboot_pending: Optional[bool] = None, + scheduled_reboot_time: Optional[str] = None, + **kwargs: Any + ) -> None: """ - :keyword goal: [Required] Defines supported metric goals for hyperparameter tuning. Required. - Known values are: "Minimize" and "Maximize". - :paramtype goal: str or ~azure.mgmt.machinelearningservices.models.Goal - :keyword primary_metric: [Required] Name of the metric to optimize. Required. - :paramtype primary_metric: str + :keyword patch_status: The os patching status. Known values are: "CompletedWithWarnings", + "Failed", "InProgress", "Succeeded", and "Unknown". + :paramtype patch_status: str or ~azure.mgmt.machinelearningservices.models.PatchStatus + :keyword latest_patch_time: Time of the latest os patching. + :paramtype latest_patch_time: str + :keyword reboot_pending: Specifies whether this compute instance is pending for reboot to + finish os patching. + :paramtype reboot_pending: bool + :keyword scheduled_reboot_time: Time of scheduled reboot. + :paramtype scheduled_reboot_time: str """ super().__init__(**kwargs) - self.goal = goal - self.primary_metric = primary_metric + self.patch_status = patch_status + self.latest_patch_time = latest_patch_time + self.reboot_pending = reboot_pending + self.scheduled_reboot_time = scheduled_reboot_time -class OnlineDeployment(TrackedResource): - """OnlineDeployment. +class OutboundRuleBasicResource(Resource): + """Outbound Rule Basic Resource for the managed network of a machine learning workspace. Variables are only populated by the server, and will be ignored when sending a request. @@ -15779,19 +24253,9 @@ class OnlineDeployment(TrackedResource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: Resource tags. - :vartype tags: dict[str, str] - :ivar location: The geo-location where the resource lives. Required. - :vartype location: str - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :vartype kind: str - :ivar properties: [Required] Additional attributes of the entity. Required. - :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar properties: Outbound Rule for the managed network of a machine learning workspace. + Required. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.OutboundRule """ _validation = { @@ -15799,7 +24263,6 @@ class OnlineDeployment(TrackedResource): "name": {"readonly": True}, "type": {"readonly": True}, "system_data": {"readonly": True}, - "location": {"required": True}, "properties": {"required": True}, } @@ -15808,392 +24271,386 @@ class OnlineDeployment(TrackedResource): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "system_data": {"key": "systemData", "type": "SystemData"}, - "tags": {"key": "tags", "type": "{str}"}, - "location": {"key": "location", "type": "str"}, - "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, - "kind": {"key": "kind", "type": "str"}, - "properties": {"key": "properties", "type": "OnlineDeploymentProperties"}, - "sku": {"key": "sku", "type": "Sku"}, + "properties": {"key": "properties", "type": "OutboundRule"}, + } + + def __init__(self, *, properties: "_models.OutboundRule", **kwargs: Any) -> None: + """ + :keyword properties: Outbound Rule for the managed network of a machine learning workspace. + Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.OutboundRule + """ + super().__init__(**kwargs) + self.properties = properties + + +class OutboundRuleListResult(_serialization.Model): + """List of outbound rules for the managed network of a machine learning workspace. + + :ivar next_link: The link to the next page constructed using the continuationToken. If null, + there are no additional pages. + :vartype next_link: str + :ivar value: The list of machine learning workspaces. Since this list may be incomplete, the + nextLink field should be used to request the next list of machine learning workspaces. + :vartype value: list[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OutboundRuleBasicResource]"}, } def __init__( self, *, - location: str, - properties: "_models.OnlineDeploymentProperties", - tags: Optional[Dict[str, str]] = None, - identity: Optional["_models.ManagedServiceIdentity"] = None, - kind: Optional[str] = None, - sku: Optional["_models.Sku"] = None, + next_link: Optional[str] = None, + value: Optional[List["_models.OutboundRuleBasicResource"]] = None, **kwargs: Any ) -> None: """ - :keyword tags: Resource tags. - :paramtype tags: dict[str, str] - :keyword location: The geo-location where the resource lives. Required. - :paramtype location: str - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :paramtype kind: str - :keyword properties: [Required] Additional attributes of the entity. Required. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword next_link: The link to the next page constructed using the continuationToken. If + null, there are no additional pages. + :paramtype next_link: str + :keyword value: The list of machine learning workspaces. Since this list may be incomplete, the + nextLink field should be used to request the next list of machine learning workspaces. 
+ :paramtype value: list[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] """ - super().__init__(tags=tags, location=location, **kwargs) - self.identity = identity - self.kind = kind - self.properties = properties - self.sku = sku + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class OutputPathAssetReference(AssetReferenceBase): + """Reference to an asset via its path in a job output. + + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". + :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType + :ivar job_id: ARM resource ID of the job. + :vartype job_id: str + :ivar path: The path of the file/directory in the job output. + :vartype path: str + """ + + _validation = { + "reference_type": {"required": True}, + } + + _attribute_map = { + "reference_type": {"key": "referenceType", "type": "str"}, + "job_id": {"key": "jobId", "type": "str"}, + "path": {"key": "path", "type": "str"}, + } + + def __init__(self, *, job_id: Optional[str] = None, path: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword job_id: ARM resource ID of the job. + :paramtype job_id: str + :keyword path: The path of the file/directory in the job output. + :paramtype path: str + """ + super().__init__(**kwargs) + self.reference_type: str = "OutputPath" + self.job_id = job_id + self.path = path + +class PackageInputPathBase(_serialization.Model): + """PackageInputPathBase. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + PackageInputPathId, PackageInputPathVersion, PackageInputPathUrl -class OnlineDeploymentTrackedResourceArmPaginatedResult(_serialization.Model): - """A paginated list of OnlineDeployment entities. + All required parameters must be populated in order to send to Azure. - :ivar next_link: The link to the next page of OnlineDeployment objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type OnlineDeployment. - :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :ivar input_path_type: [Required] Input path type for package inputs. Required. Known values + are: "Url", "PathId", and "PathVersion". + :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType """ + _validation = { + "input_path_type": {"required": True}, + } + _attribute_map = { - "next_link": {"key": "nextLink", "type": "str"}, - "value": {"key": "value", "type": "[OnlineDeployment]"}, + "input_path_type": {"key": "inputPathType", "type": "str"}, } - def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["_models.OnlineDeployment"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword next_link: The link to the next page of OnlineDeployment objects. If null, there are - no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type OnlineDeployment. 
- :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] - """ - super().__init__(**kwargs) - self.next_link = next_link - self.value = value + _subtype_map = { + "input_path_type": { + "PathId": "PackageInputPathId", + "PathVersion": "PackageInputPathVersion", + "Url": "PackageInputPathUrl", + } + } + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.input_path_type: Optional[str] = None -class OnlineEndpoint(TrackedResource): - """OnlineEndpoint. - Variables are only populated by the server, and will be ignored when sending a request. +class PackageInputPathId(PackageInputPathBase): + """Package input path specified with a resource id. All required parameters must be populated in order to send to Azure. - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: Resource tags. - :vartype tags: dict[str, str] - :ivar location: The geo-location where the resource lives. Required. - :vartype location: str - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :vartype kind: str - :ivar properties: [Required] Additional attributes of the entity. Required. - :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar input_path_type: [Required] Input path type for package inputs. Required. Known values + are: "Url", "PathId", and "PathVersion". + :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType + :ivar resource_id: Input resource id. 
+ :vartype resource_id: str """ _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "system_data": {"readonly": True}, - "location": {"required": True}, - "properties": {"required": True}, + "input_path_type": {"required": True}, } _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, - "tags": {"key": "tags", "type": "{str}"}, - "location": {"key": "location", "type": "str"}, - "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, - "kind": {"key": "kind", "type": "str"}, - "properties": {"key": "properties", "type": "OnlineEndpointProperties"}, - "sku": {"key": "sku", "type": "Sku"}, + "input_path_type": {"key": "inputPathType", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - *, - location: str, - properties: "_models.OnlineEndpointProperties", - tags: Optional[Dict[str, str]] = None, - identity: Optional["_models.ManagedServiceIdentity"] = None, - kind: Optional[str] = None, - sku: Optional["_models.Sku"] = None, - **kwargs: Any - ) -> None: + def __init__(self, *, resource_id: Optional[str] = None, **kwargs: Any) -> None: """ - :keyword tags: Resource tags. - :paramtype tags: dict[str, str] - :keyword location: The geo-location where the resource lives. Required. - :paramtype location: str - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :paramtype kind: str - :keyword properties: [Required] Additional attributes of the entity. Required. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword resource_id: Input resource id. + :paramtype resource_id: str """ - super().__init__(tags=tags, location=location, **kwargs) - self.identity = identity - self.kind = kind - self.properties = properties - self.sku = sku - + super().__init__(**kwargs) + self.input_path_type: str = "PathId" + self.resource_id = resource_id -class OnlineEndpointProperties(EndpointPropertiesBase): # pylint: disable=too-many-instance-attributes - """Online endpoint configuration. - Variables are only populated by the server, and will be ignored when sending a request. +class PackageInputPathUrl(PackageInputPathBase): + """Package input path specified as an url. All required parameters must be populated in order to send to Azure. - :ivar auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure - Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. - Required. Known values are: "AMLToken", "Key", and "AADToken". - :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode - :ivar description: Description of the inference endpoint. - :vartype description: str - :ivar keys: EndpointAuthKeys to set initially on an Endpoint. - This property will always be returned as null. AuthKey values must be retrieved using the - ListKeys API. 
- :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar scoring_uri: Endpoint URI. - :vartype scoring_uri: str - :ivar swagger_uri: Endpoint Swagger URI. - :vartype swagger_uri: str - :ivar compute: ARM resource ID of the compute if it exists. - optional. - :vartype compute: str - :ivar mirror_traffic: Percentage of traffic to be mirrored to each deployment without using - returned scoring. Traffic values need to sum to utmost 50. - :vartype mirror_traffic: dict[str, int] - :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", - "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState - :ivar public_network_access: Set to "Enabled" for endpoints that should allow public access - when Private Link is enabled. Known values are: "Enabled" and "Disabled". - :vartype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType - :ivar traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic values - need to sum to 100. - :vartype traffic: dict[str, int] + :ivar input_path_type: [Required] Input path type for package inputs. Required. Known values + are: "Url", "PathId", and "PathVersion". + :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType + :ivar url: Input path url. + :vartype url: str """ _validation = { - "auth_mode": {"required": True}, - "scoring_uri": {"readonly": True}, - "swagger_uri": {"readonly": True}, - "provisioning_state": {"readonly": True}, + "input_path_type": {"required": True}, } _attribute_map = { - "auth_mode": {"key": "authMode", "type": "str"}, - "description": {"key": "description", "type": "str"}, - "keys": {"key": "keys", "type": "EndpointAuthKeys"}, - "properties": {"key": "properties", "type": "{str}"}, - "scoring_uri": {"key": "scoringUri", "type": "str"}, - "swagger_uri": {"key": "swaggerUri", "type": "str"}, - "compute": {"key": "compute", "type": "str"}, - "mirror_traffic": {"key": "mirrorTraffic", "type": "{int}"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, - "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, - "traffic": {"key": "traffic", "type": "{int}"}, + "input_path_type": {"key": "inputPathType", "type": "str"}, + "url": {"key": "url", "type": "str"}, } - def __init__( - self, - *, - auth_mode: Union[str, "_models.EndpointAuthMode"], - description: Optional[str] = None, - keys: Optional["_models.EndpointAuthKeys"] = None, - properties: Optional[Dict[str, str]] = None, - compute: Optional[str] = None, - mirror_traffic: Optional[Dict[str, int]] = None, - public_network_access: Optional[Union[str, "_models.PublicNetworkAccessType"]] = None, - traffic: Optional[Dict[str, int]] = None, - **kwargs: Any - ) -> None: + def __init__(self, *, url: Optional[str] = None, **kwargs: Any) -> None: """ - :keyword auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure - Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. - Required. Known values are: "AMLToken", "Key", and "AADToken". - :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode - :keyword description: Description of the inference endpoint. 
- :paramtype description: str - :keyword keys: EndpointAuthKeys to set initially on an Endpoint. - This property will always be returned as null. AuthKey values must be retrieved using the - ListKeys API. - :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword compute: ARM resource ID of the compute if it exists. - optional. - :paramtype compute: str - :keyword mirror_traffic: Percentage of traffic to be mirrored to each deployment without using - returned scoring. Traffic values need to sum to utmost 50. - :paramtype mirror_traffic: dict[str, int] - :keyword public_network_access: Set to "Enabled" for endpoints that should allow public access - when Private Link is enabled. Known values are: "Enabled" and "Disabled". - :paramtype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType - :keyword traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic - values need to sum to 100. - :paramtype traffic: dict[str, int] + :keyword url: Input path url. + :paramtype url: str """ - super().__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) - self.compute = compute - self.mirror_traffic = mirror_traffic - self.provisioning_state = None - self.public_network_access = public_network_access - self.traffic = traffic + super().__init__(**kwargs) + self.input_path_type: str = "Url" + self.url = url -class OnlineEndpointTrackedResourceArmPaginatedResult(_serialization.Model): - """A paginated list of OnlineEndpoint entities. +class PackageInputPathVersion(PackageInputPathBase): + """Package input path specified with name and version. - :ivar next_link: The link to the next page of OnlineEndpoint objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type OnlineEndpoint. - :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + All required parameters must be populated in order to send to Azure. + + :ivar input_path_type: [Required] Input path type for package inputs. Required. Known values + are: "Url", "PathId", and "PathVersion". + :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType + :ivar resource_name: Input resource name. + :vartype resource_name: str + :ivar resource_version: Input resource version. + :vartype resource_version: str """ + _validation = { + "input_path_type": {"required": True}, + } + _attribute_map = { - "next_link": {"key": "nextLink", "type": "str"}, - "value": {"key": "value", "type": "[OnlineEndpoint]"}, + "input_path_type": {"key": "inputPathType", "type": "str"}, + "resource_name": {"key": "resourceName", "type": "str"}, + "resource_version": {"key": "resourceVersion", "type": "str"}, } def __init__( - self, *, next_link: Optional[str] = None, value: Optional[List["_models.OnlineEndpoint"]] = None, **kwargs: Any + self, *, resource_name: Optional[str] = None, resource_version: Optional[str] = None, **kwargs: Any ) -> None: """ - :keyword next_link: The link to the next page of OnlineEndpoint objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type OnlineEndpoint. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :keyword resource_name: Input resource name. 
+ :paramtype resource_name: str + :keyword resource_version: Input resource version. + :paramtype resource_version: str """ super().__init__(**kwargs) - self.next_link = next_link - self.value = value + self.input_path_type: str = "PathVersion" + self.resource_name = resource_name + self.resource_version = resource_version -class OnlineRequestSettings(_serialization.Model): - """Online deployment scoring requests configuration. +class PackageRequest(_serialization.Model): + """Model package operation request properties. - :ivar max_concurrent_requests_per_instance: The number of maximum concurrent requests per node - allowed per deployment. Defaults to 1. - :vartype max_concurrent_requests_per_instance: int - :ivar max_queue_wait: The maximum amount of time a request will stay in the queue in ISO 8601 - format. - Defaults to 500ms. - :vartype max_queue_wait: ~datetime.timedelta - :ivar request_timeout: The scoring timeout in ISO 8601 format. - Defaults to 5000ms. - :vartype request_timeout: ~datetime.timedelta + All required parameters must be populated in order to send to Azure. + + :ivar base_environment_source: Base environment to start with. + :vartype base_environment_source: + ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSource + :ivar environment_variables: Collection of environment variables. + :vartype environment_variables: dict[str, str] + :ivar inferencing_server: [Required] Inferencing server configurations. Required. + :vartype inferencing_server: ~azure.mgmt.machinelearningservices.models.InferencingServer + :ivar inputs: Collection of inputs. + :vartype inputs: list[~azure.mgmt.machinelearningservices.models.ModelPackageInput] + :ivar model_configuration: Model configuration including the mount mode. + :vartype model_configuration: ~azure.mgmt.machinelearningservices.models.ModelConfiguration + :ivar properties: Property dictionary. Properties can be added, removed, and updated. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar target_environment_id: [Required] Arm ID of the target environment to be created by + package operation. Required. 
+ :vartype target_environment_id: str """ + _validation = { + "inferencing_server": {"required": True}, + "target_environment_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + _attribute_map = { - "max_concurrent_requests_per_instance": {"key": "maxConcurrentRequestsPerInstance", "type": "int"}, - "max_queue_wait": {"key": "maxQueueWait", "type": "duration"}, - "request_timeout": {"key": "requestTimeout", "type": "duration"}, + "base_environment_source": {"key": "baseEnvironmentSource", "type": "BaseEnvironmentSource"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "inferencing_server": {"key": "inferencingServer", "type": "InferencingServer"}, + "inputs": {"key": "inputs", "type": "[ModelPackageInput]"}, + "model_configuration": {"key": "modelConfiguration", "type": "ModelConfiguration"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "target_environment_id": {"key": "targetEnvironmentId", "type": "str"}, } def __init__( self, *, - max_concurrent_requests_per_instance: int = 1, - max_queue_wait: datetime.timedelta = "PT0.5S", - request_timeout: datetime.timedelta = "PT5S", + inferencing_server: "_models.InferencingServer", + target_environment_id: str, + base_environment_source: Optional["_models.BaseEnvironmentSource"] = None, + environment_variables: Optional[Dict[str, str]] = None, + inputs: Optional[List["_models.ModelPackageInput"]] = None, + model_configuration: Optional["_models.ModelConfiguration"] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, **kwargs: Any ) -> None: """ - :keyword max_concurrent_requests_per_instance: The number of maximum concurrent requests per - node allowed per deployment. Defaults to 1. - :paramtype max_concurrent_requests_per_instance: int - :keyword max_queue_wait: The maximum amount of time a request will stay in the queue in ISO - 8601 format. - Defaults to 500ms. - :paramtype max_queue_wait: ~datetime.timedelta - :keyword request_timeout: The scoring timeout in ISO 8601 format. - Defaults to 5000ms. - :paramtype request_timeout: ~datetime.timedelta + :keyword base_environment_source: Base environment to start with. + :paramtype base_environment_source: + ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSource + :keyword environment_variables: Collection of environment variables. + :paramtype environment_variables: dict[str, str] + :keyword inferencing_server: [Required] Inferencing server configurations. Required. + :paramtype inferencing_server: ~azure.mgmt.machinelearningservices.models.InferencingServer + :keyword inputs: Collection of inputs. + :paramtype inputs: list[~azure.mgmt.machinelearningservices.models.ModelPackageInput] + :keyword model_configuration: Model configuration including the mount mode. + :paramtype model_configuration: ~azure.mgmt.machinelearningservices.models.ModelConfiguration + :keyword properties: Property dictionary. Properties can be added, removed, and updated. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword target_environment_id: [Required] Arm ID of the target environment to be created by + package operation. Required. 
+ :paramtype target_environment_id: str """ super().__init__(**kwargs) - self.max_concurrent_requests_per_instance = max_concurrent_requests_per_instance - self.max_queue_wait = max_queue_wait - self.request_timeout = request_timeout + self.base_environment_source = base_environment_source + self.environment_variables = environment_variables + self.inferencing_server = inferencing_server + self.inputs = inputs + self.model_configuration = model_configuration + self.properties = properties + self.tags = tags + self.target_environment_id = target_environment_id -class OutputPathAssetReference(AssetReferenceBase): - """Reference to an asset via its path in a job output. +class PackageResponse(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Package response returned after async package operation completes successfully. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values - are: "Id", "DataPath", and "OutputPath". - :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType - :ivar job_id: ARM resource ID of the job. - :vartype job_id: str - :ivar path: The path of the file/directory in the job output. - :vartype path: str + :ivar base_environment_source: Base environment to start with. + :vartype base_environment_source: + ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSource + :ivar build_id: Build id of the image build operation. + :vartype build_id: str + :ivar build_state: Build state of the image build operation. Known values are: "NotStarted", + "Running", "Succeeded", and "Failed". + :vartype build_state: str or ~azure.mgmt.machinelearningservices.models.PackageBuildState + :ivar environment_variables: Collection of environment variables. + :vartype environment_variables: dict[str, str] + :ivar inferencing_server: Inferencing server configurations. + :vartype inferencing_server: ~azure.mgmt.machinelearningservices.models.InferencingServer + :ivar inputs: Collection of inputs. + :vartype inputs: list[~azure.mgmt.machinelearningservices.models.ModelPackageInput] + :ivar log_url: Log url of the image build operation. + :vartype log_url: str + :ivar model_configuration: Model configuration including the mount mode. + :vartype model_configuration: ~azure.mgmt.machinelearningservices.models.ModelConfiguration + :ivar properties: Property dictionary. Tags can be added, removed, and updated. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar target_environment_id: Asset ID of the target environment created by package operation. 
+ :vartype target_environment_id: str """ _validation = { - "reference_type": {"required": True}, - } - - _attribute_map = { - "reference_type": {"key": "referenceType", "type": "str"}, - "job_id": {"key": "jobId", "type": "str"}, - "path": {"key": "path", "type": "str"}, + "base_environment_source": {"readonly": True}, + "build_id": {"readonly": True}, + "build_state": {"readonly": True}, + "environment_variables": {"readonly": True}, + "inferencing_server": {"readonly": True}, + "inputs": {"readonly": True}, + "log_url": {"readonly": True}, + "model_configuration": {"readonly": True}, + "properties": {"readonly": True}, + "tags": {"readonly": True}, + "target_environment_id": {"readonly": True}, + } + + _attribute_map = { + "base_environment_source": {"key": "baseEnvironmentSource", "type": "BaseEnvironmentSource"}, + "build_id": {"key": "buildId", "type": "str"}, + "build_state": {"key": "buildState", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "inferencing_server": {"key": "inferencingServer", "type": "InferencingServer"}, + "inputs": {"key": "inputs", "type": "[ModelPackageInput]"}, + "log_url": {"key": "logUrl", "type": "str"}, + "model_configuration": {"key": "modelConfiguration", "type": "ModelConfiguration"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "target_environment_id": {"key": "targetEnvironmentId", "type": "str"}, } - def __init__(self, *, job_id: Optional[str] = None, path: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword job_id: ARM resource ID of the job. - :paramtype job_id: str - :keyword path: The path of the file/directory in the job output. - :paramtype path: str - """ + def __init__(self, **kwargs: Any) -> None: + """ """ super().__init__(**kwargs) - self.reference_type: str = "OutputPath" - self.job_id = job_id - self.path = path + self.base_environment_source = None + self.build_id = None + self.build_state = None + self.environment_variables = None + self.inferencing_server = None + self.inputs = None + self.log_url = None + self.model_configuration = None + self.properties = None + self.tags = None + self.target_environment_id = None class PaginatedComputeResourcesList(_serialization.Model): @@ -16276,6 +24733,50 @@ def __init__( self.tags = tags +class PartialJobBase(_serialization.Model): + """Mutable base definition for a job. + + :ivar notification_setting: Mutable notification setting for the job. + :vartype notification_setting: + ~azure.mgmt.machinelearningservices.models.PartialNotificationSetting + """ + + _attribute_map = { + "notification_setting": {"key": "notificationSetting", "type": "PartialNotificationSetting"}, + } + + def __init__( + self, *, notification_setting: Optional["_models.PartialNotificationSetting"] = None, **kwargs: Any + ) -> None: + """ + :keyword notification_setting: Mutable notification setting for the job. + :paramtype notification_setting: + ~azure.mgmt.machinelearningservices.models.PartialNotificationSetting + """ + super().__init__(**kwargs) + self.notification_setting = notification_setting + + +class PartialJobBasePartialResource(_serialization.Model): + """Azure Resource Manager resource envelope strictly used in update requests. + + :ivar properties: Additional attributes of the entity. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.PartialJobBase + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "PartialJobBase"}, + } + + def __init__(self, *, properties: Optional["_models.PartialJobBase"] = None, **kwargs: Any) -> None: + """ + :keyword properties: Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.PartialJobBase + """ + super().__init__(**kwargs) + self.properties = properties + + class PartialManagedServiceIdentity(_serialization.Model): """Managed service identity (system assigned and/or user assigned identities). @@ -16394,6 +24895,66 @@ def __init__( self.sku = sku +class PartialMinimalTrackedResourceWithSkuAndIdentity(PartialMinimalTrackedResource): + """Strictly used in update requests. + + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.PartialSku + """ + + _attribute_map = { + "tags": {"key": "tags", "type": "{str}"}, + "identity": {"key": "identity", "type": "PartialManagedServiceIdentity"}, + "sku": {"key": "sku", "type": "PartialSku"}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.PartialManagedServiceIdentity"] = None, + sku: Optional["_models.PartialSku"] = None, + **kwargs: Any + ) -> None: + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku + """ + super().__init__(tags=tags, **kwargs) + self.identity = identity + self.sku = sku + + +class PartialNotificationSetting(_serialization.Model): + """Mutable configuration for notification. + + :ivar webhooks: Send webhook callback to a service. Key is a user-provided name for the + webhook. + :vartype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] + """ + + _attribute_map = { + "webhooks": {"key": "webhooks", "type": "{Webhook}"}, + } + + def __init__(self, *, webhooks: Optional[Dict[str, "_models.Webhook"]] = None, **kwargs: Any) -> None: + """ + :keyword webhooks: Send webhook callback to a service. Key is a user-provided name for the + webhook. + :paramtype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] + """ + super().__init__(**kwargs) + self.webhooks = webhooks + + class PartialRegistryPartialTrackedResource(_serialization.Model): """Strictly used in update requests. @@ -16532,17 +25093,20 @@ class PATAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): All required parameters must be populated in order to send to Azure. :ivar auth_type: Authentication type of the connection target. Required. Known values are: - "PAT", "ManagedIdentity", "UsernamePassword", "None", and "SAS". + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". 
:vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType :ivar category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. + :vartype metadata: JSON :ivar target: :vartype target: str - :ivar value: Value details of the workspace connection. - :vartype value: str - :ivar value_format: format for the workspace connection value. "JSON" - :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat :ivar credentials: :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken @@ -16555,9 +25119,9 @@ class PATAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): _attribute_map = { "auth_type": {"key": "authType", "type": "str"}, "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, "target": {"key": "target", "type": "str"}, - "value": {"key": "value", "type": "str"}, - "value_format": {"key": "valueFormat", "type": "str"}, "credentials": {"key": "credentials", "type": "WorkspaceConnectionPersonalAccessToken"}, } @@ -16565,27 +25129,29 @@ def __init__( self, *, category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - value: Optional[str] = None, - value_format: Optional[Union[str, "_models.ValueFormat"]] = None, credentials: Optional["_models.WorkspaceConnectionPersonalAccessToken"] = None, **kwargs: Any ) -> None: """ :keyword category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. + :paramtype metadata: JSON :keyword target: :paramtype target: str - :keyword value: Value details of the workspace connection. - :paramtype value: str - :keyword value_format: format for the workspace connection value. "JSON" - :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat :keyword credentials: :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken """ - super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + super().__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) self.auth_type: str = "PAT" self.credentials = credentials @@ -16753,14 +25319,19 @@ class PipelineJob(JobBaseProperties): # pylint: disable=too-many-instance-attri :ivar is_archived: Is the asset archived?. :vartype is_archived: bool :ivar job_type: [Required] Specifies the type of job. Required. 
Known values are: "AutoML", - "Command", "Sweep", and "Pipeline". + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar notification_setting: Notification setting for the job. + :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :ivar secrets_configuration: Configuration for secrets to be made available during runtime. + :vartype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", - "Canceled", "NotResponding", "Paused", and "Unknown". + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus :ivar inputs: Inputs for the pipeline job. :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] @@ -16790,6 +25361,8 @@ class PipelineJob(JobBaseProperties): # pylint: disable=too-many-instance-attri "identity": {"key": "identity", "type": "IdentityConfiguration"}, "is_archived": {"key": "isArchived", "type": "bool"}, "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, "services": {"key": "services", "type": "{JobService}"}, "status": {"key": "status", "type": "str"}, "inputs": {"key": "inputs", "type": "{JobInput}"}, @@ -16811,6 +25384,8 @@ def __init__( experiment_name: str = "Default", identity: Optional["_models.IdentityConfiguration"] = None, is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, services: Optional[Dict[str, "_models.JobService"]] = None, inputs: Optional[Dict[str, "_models.JobInput"]] = None, jobs: Optional[Dict[str, JSON]] = None, @@ -16841,6 +25416,11 @@ def __init__( :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool + :keyword notification_setting: Notification setting for the job. + :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :keyword secrets_configuration: Configuration for secrets to be made available during runtime. + :paramtype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :keyword services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] @@ -16865,6 +25445,8 @@ def __init__( experiment_name=experiment_name, identity=identity, is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, services=services, **kwargs ) @@ -16876,6 +25458,223 @@ def __init__( self.source_job_id = source_job_id +class PoolEnvironmentConfiguration(_serialization.Model): + """Environment configuration options. 
+ + :ivar environment_id: ARM resource ID of the environment specification for the inference pool. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the inference pool. + :vartype environment_variables: dict[str, str] + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar startup_probe: This verifies whether the application within a container is started. + Startup probes run before any other probe, and, unless it finishes successfully, disables other + probes. + :vartype startup_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + """ + + _attribute_map = { + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "startup_probe": {"key": "startupProbe", "type": "ProbeSettings"}, + } + + def __init__( + self, + *, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + startup_probe: Optional["_models.ProbeSettings"] = None, + **kwargs: Any + ) -> None: + """ + :keyword environment_id: ARM resource ID of the environment specification for the inference + pool. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the inference pool. + :paramtype environment_variables: dict[str, str] + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword startup_probe: This verifies whether the application within a container is started. + Startup probes run before any other probe, and, unless it finishes successfully, disables other + probes. + :paramtype startup_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + """ + super().__init__(**kwargs) + self.environment_id = environment_id + self.environment_variables = environment_variables + self.liveness_probe = liveness_probe + self.readiness_probe = readiness_probe + self.startup_probe = startup_probe + + +class PoolModelConfiguration(_serialization.Model): + """Model configuration options. + + :ivar model_id: The URI path to the model. + :vartype model_id: str + """ + + _attribute_map = { + "model_id": {"key": "modelId", "type": "str"}, + } + + def __init__(self, *, model_id: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword model_id: The URI path to the model. + :paramtype model_id: str + """ + super().__init__(**kwargs) + self.model_id = model_id + + +class PoolStatus(_serialization.Model): + """PoolStatus. + + :ivar actual_capacity: Gets or sets the actual number of instances in the pool. 
+ :vartype actual_capacity: int + :ivar event_log: Gets or sets event log for inference pool. + :vartype event_log: list[JSON] + :ivar group_count: Gets or sets the actual number of groups in the pool. + :vartype group_count: int + :ivar requested_capacity: Gets or sets the requested number of instances for the pool. + :vartype requested_capacity: int + :ivar reserved_capacity: Gets or sets the number of instances in the pool reserved by the + system. + :vartype reserved_capacity: int + """ + + _attribute_map = { + "actual_capacity": {"key": "actualCapacity", "type": "int"}, + "event_log": {"key": "eventLog", "type": "[object]"}, + "group_count": {"key": "groupCount", "type": "int"}, + "requested_capacity": {"key": "requestedCapacity", "type": "int"}, + "reserved_capacity": {"key": "reservedCapacity", "type": "int"}, + } + + def __init__( + self, + *, + actual_capacity: int = 0, + event_log: Optional[List[JSON]] = None, + group_count: int = 0, + requested_capacity: int = 0, + reserved_capacity: int = 0, + **kwargs: Any + ) -> None: + """ + :keyword actual_capacity: Gets or sets the actual number of instances in the pool. + :paramtype actual_capacity: int + :keyword event_log: Gets or sets event log for inference pool. + :paramtype event_log: list[JSON] + :keyword group_count: Gets or sets the actual number of groups in the pool. + :paramtype group_count: int + :keyword requested_capacity: Gets or sets the requested number of instances for the pool. + :paramtype requested_capacity: int + :keyword reserved_capacity: Gets or sets the number of instances in the pool reserved by the + system. + :paramtype reserved_capacity: int + """ + super().__init__(**kwargs) + self.actual_capacity = actual_capacity + self.event_log = event_log + self.group_count = group_count + self.requested_capacity = requested_capacity + self.reserved_capacity = reserved_capacity + + +class PredictionDriftMonitoringSignal(MonitoringSignalBase): + """PredictionDriftMonitoringSignal. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". + :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType + :ivar metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :vartype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.PredictionDriftMetricThresholdBase] + :ivar model_type: [Required] The type of the model monitored. Required. Known values are: + "Classification" and "Regression". + :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType + :ivar production_data: [Required] The data which drift will be calculated for. Required. + :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + :ivar reference_data: [Required] The data to calculate drift against. Required. 
+ :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + """ + + _validation = { + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "model_type": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, + } + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[PredictionDriftMetricThresholdBase]"}, + "model_type": {"key": "modelType", "type": "str"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, + } + + def __init__( + self, + *, + metric_thresholds: List["_models.PredictionDriftMetricThresholdBase"], + model_type: Union[str, "_models.MonitoringModelType"], + production_data: "_models.MonitoringInputDataBase", + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. + :paramtype metric_thresholds: + list[~azure.mgmt.machinelearningservices.models.PredictionDriftMetricThresholdBase] + :keyword model_type: [Required] The type of the model monitored. Required. Known values are: + "Classification" and "Regression". + :paramtype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType + :keyword production_data: [Required] The data which drift will be calculated for. Required. + :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + :keyword reference_data: [Required] The data to calculate drift against. Required. + :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase + """ + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "PredictionDrift" + self.metric_thresholds = metric_thresholds + self.model_type = model_type + self.production_data = production_data + self.reference_data = reference_data + + class PrivateEndpoint(_serialization.Model): """The Private Endpoint resource. @@ -16915,22 +25714,23 @@ class PrivateEndpointConnection(Resource): # pylint: disable=too-many-instance- :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar identity: The identity of the resource. + :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar location: Specifies the location of the resource. + :ivar location: Same as workspace location. :vartype location: str - :ivar tags: Contains resource tags defined as key/value pairs. - :vartype tags: dict[str, str] - :ivar sku: The sku of the workspace. 
+ :ivar sku: Optional. This field is required to be implemented by the RP because AML is + supporting more than one tier. :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar private_endpoint: The resource of private end point. - :vartype private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint - :ivar private_link_service_connection_state: A collection of information about the state of the - connection between service consumer and provider. + :ivar tags: Dictionary of :code:``. + :vartype tags: dict[str, str] + :ivar private_endpoint: The Private Endpoint resource. + :vartype private_endpoint: + ~azure.mgmt.machinelearningservices.models.WorkspacePrivateEndpointResource + :ivar private_link_service_connection_state: The connection state. :vartype private_link_service_connection_state: ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: The provisioning state of the private endpoint connection resource. - Known values are: "Succeeded", "Creating", "Deleting", and "Failed". + :ivar provisioning_state: The current provisioning state. Known values are: "Succeeded", + "Creating", "Deleting", and "Failed". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState """ @@ -16950,9 +25750,9 @@ class PrivateEndpointConnection(Resource): # pylint: disable=too-many-instance- "system_data": {"key": "systemData", "type": "SystemData"}, "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, "location": {"key": "location", "type": "str"}, - "tags": {"key": "tags", "type": "{str}"}, "sku": {"key": "sku", "type": "Sku"}, - "private_endpoint": {"key": "properties.privateEndpoint", "type": "PrivateEndpoint"}, + "tags": {"key": "tags", "type": "{str}"}, + "private_endpoint": {"key": "properties.privateEndpoint", "type": "WorkspacePrivateEndpointResource"}, "private_link_service_connection_state": { "key": "properties.privateLinkServiceConnectionState", "type": "PrivateLinkServiceConnectionState", @@ -16965,33 +25765,34 @@ def __init__( *, identity: Optional["_models.ManagedServiceIdentity"] = None, location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, sku: Optional["_models.Sku"] = None, - private_endpoint: Optional["_models.PrivateEndpoint"] = None, + tags: Optional[Dict[str, str]] = None, + private_endpoint: Optional["_models.WorkspacePrivateEndpointResource"] = None, private_link_service_connection_state: Optional["_models.PrivateLinkServiceConnectionState"] = None, **kwargs: Any ) -> None: """ - :keyword identity: The identity of the resource. + :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword location: Specifies the location of the resource. + :keyword location: Same as workspace location. :paramtype location: str - :keyword tags: Contains resource tags defined as key/value pairs. - :paramtype tags: dict[str, str] - :keyword sku: The sku of the workspace. + :keyword sku: Optional. This field is required to be implemented by the RP because AML is + supporting more than one tier. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword private_endpoint: The resource of private end point. 
- :paramtype private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint - :keyword private_link_service_connection_state: A collection of information about the state of - the connection between service consumer and provider. + :keyword tags: Dictionary of :code:``. + :paramtype tags: dict[str, str] + :keyword private_endpoint: The Private Endpoint resource. + :paramtype private_endpoint: + ~azure.mgmt.machinelearningservices.models.WorkspacePrivateEndpointResource + :keyword private_link_service_connection_state: The connection state. :paramtype private_link_service_connection_state: ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState """ super().__init__(**kwargs) self.identity = identity self.location = location - self.tags = tags self.sku = sku + self.tags = tags self.private_endpoint = private_endpoint self.private_link_service_connection_state = private_link_service_connection_state self.provisioning_state = None @@ -17017,6 +25818,109 @@ def __init__(self, *, value: Optional[List["_models.PrivateEndpointConnection"]] self.value = value +class PrivateEndpointDestination(_serialization.Model): + """Private Endpoint destination for a Private Endpoint Outbound Rule for the managed network of a + machine learning workspace. + + :ivar service_resource_id: + :vartype service_resource_id: str + :ivar spark_enabled: + :vartype spark_enabled: bool + :ivar spark_status: Type of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Inactive" and "Active". + :vartype spark_status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus + :ivar subresource_target: + :vartype subresource_target: str + """ + + _attribute_map = { + "service_resource_id": {"key": "serviceResourceId", "type": "str"}, + "spark_enabled": {"key": "sparkEnabled", "type": "bool"}, + "spark_status": {"key": "sparkStatus", "type": "str"}, + "subresource_target": {"key": "subresourceTarget", "type": "str"}, + } + + def __init__( + self, + *, + service_resource_id: Optional[str] = None, + spark_enabled: Optional[bool] = None, + spark_status: Optional[Union[str, "_models.RuleStatus"]] = None, + subresource_target: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword service_resource_id: + :paramtype service_resource_id: str + :keyword spark_enabled: + :paramtype spark_enabled: bool + :keyword spark_status: Type of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Inactive" and "Active". + :paramtype spark_status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus + :keyword subresource_target: + :paramtype subresource_target: str + """ + super().__init__(**kwargs) + self.service_resource_id = service_resource_id + self.spark_enabled = spark_enabled + self.spark_status = spark_status + self.subresource_target = subresource_target + + +class PrivateEndpointOutboundRule(OutboundRule): + """Private Endpoint Outbound Rule for the managed network of a machine learning workspace. + + All required parameters must be populated in order to send to Azure. + + :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory + :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Known + values are: "Inactive" and "Active". 
+ :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus + :ivar type: Type of a managed network Outbound Rule of a machine learning workspace. Required. + Known values are: "FQDN", "PrivateEndpoint", and "ServiceTag". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType + :ivar destination: Private Endpoint destination for a Private Endpoint Outbound Rule for the + managed network of a machine learning workspace. + :vartype destination: ~azure.mgmt.machinelearningservices.models.PrivateEndpointDestination + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "destination": {"key": "destination", "type": "PrivateEndpointDestination"}, + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.RuleCategory"]] = None, + status: Optional[Union[str, "_models.RuleStatus"]] = None, + destination: Optional["_models.PrivateEndpointDestination"] = None, + **kwargs: Any + ) -> None: + """ + :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory + :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. Known + values are: "Inactive" and "Active". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus + :keyword destination: Private Endpoint destination for a Private Endpoint Outbound Rule for the + managed network of a machine learning workspace. + :paramtype destination: ~azure.mgmt.machinelearningservices.models.PrivateEndpointDestination + """ + super().__init__(category=category, status=status, **kwargs) + self.type: str = "PrivateEndpoint" + self.destination = destination + + class PrivateEndpointResource(PrivateEndpoint): """The PE network resource that is linked to this PE connection. @@ -17062,14 +25966,15 @@ class PrivateLinkResource(Resource): # pylint: disable=too-many-instance-attrib :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar identity: The identity of the resource. + :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar location: Specifies the location of the resource. + :ivar location: Same as workspace location. :vartype location: str - :ivar tags: Contains resource tags defined as key/value pairs. - :vartype tags: dict[str, str] - :ivar sku: The sku of the workspace. + :ivar sku: Optional. This field is required to be implemented by the RP because AML is + supporting more than one tier. :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar tags: Dictionary of :code:``. + :vartype tags: dict[str, str] :ivar group_id: The private link resource group id. :vartype group_id: str :ivar required_members: The private link resource required member names. 
@@ -17083,8 +25988,6 @@ class PrivateLinkResource(Resource): # pylint: disable=too-many-instance-attrib "name": {"readonly": True}, "type": {"readonly": True}, "system_data": {"readonly": True}, - "group_id": {"readonly": True}, - "required_members": {"readonly": True}, } _attribute_map = { @@ -17094,8 +25997,8 @@ class PrivateLinkResource(Resource): # pylint: disable=too-many-instance-attrib "system_data": {"key": "systemData", "type": "SystemData"}, "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, "location": {"key": "location", "type": "str"}, - "tags": {"key": "tags", "type": "{str}"}, "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, "group_id": {"key": "properties.groupId", "type": "str"}, "required_members": {"key": "properties.requiredMembers", "type": "[str]"}, "required_zone_names": {"key": "properties.requiredZoneNames", "type": "[str]"}, @@ -17106,37 +26009,44 @@ def __init__( *, identity: Optional["_models.ManagedServiceIdentity"] = None, location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, sku: Optional["_models.Sku"] = None, + tags: Optional[Dict[str, str]] = None, + group_id: Optional[str] = None, + required_members: Optional[List[str]] = None, required_zone_names: Optional[List[str]] = None, **kwargs: Any ) -> None: """ - :keyword identity: The identity of the resource. + :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword location: Specifies the location of the resource. + :keyword location: Same as workspace location. :paramtype location: str - :keyword tags: Contains resource tags defined as key/value pairs. - :paramtype tags: dict[str, str] - :keyword sku: The sku of the workspace. + :keyword sku: Optional. This field is required to be implemented by the RP because AML is + supporting more than one tier. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword tags: Dictionary of :code:``. + :paramtype tags: dict[str, str] + :keyword group_id: The private link resource group id. + :paramtype group_id: str + :keyword required_members: The private link resource required member names. + :paramtype required_members: list[str] :keyword required_zone_names: The private link resource Private link DNS zone name. :paramtype required_zone_names: list[str] """ super().__init__(**kwargs) self.identity = identity self.location = location - self.tags = tags self.sku = sku - self.group_id = None - self.required_members = None + self.tags = tags + self.group_id = group_id + self.required_members = required_members self.required_zone_names = required_zone_names class PrivateLinkResourceListResult(_serialization.Model): """A list of private link resources. - :ivar value: Array of private link resources. + :ivar value: :vartype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] """ @@ -17146,7 +26056,7 @@ class PrivateLinkResourceListResult(_serialization.Model): def __init__(self, *, value: Optional[List["_models.PrivateLinkResource"]] = None, **kwargs: Any) -> None: """ - :keyword value: Array of private link resources. 
+ :keyword value: :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] """ super().__init__(**kwargs) @@ -17157,48 +26067,46 @@ class PrivateLinkServiceConnectionState(_serialization.Model): """A collection of information about the state of the connection between service consumer and provider. - :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", and - "Timeout". - :vartype status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus - :ivar description: The reason for approval/rejection of the connection. - :vartype description: str - :ivar actions_required: A message indicating if changes on the service provider require any - updates on the consumer. + :ivar actions_required: Some RP chose "None". Other RPs use this for region expansion. :vartype actions_required: str + :ivar description: User-defined message that, per NRP doc, may be used for approval-related + message. + :vartype description: str + :ivar status: Connection status of the service consumer with the service provider. Known values + are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". + :vartype status: str or + ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ _attribute_map = { - "status": {"key": "status", "type": "str"}, - "description": {"key": "description", "type": "str"}, "actions_required": {"key": "actionsRequired", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "status": {"key": "status", "type": "str"}, } def __init__( self, *, - status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, - description: Optional[str] = None, actions_required: Optional[str] = None, + description: Optional[str] = None, + status: Optional[Union[str, "_models.EndpointServiceConnectionStatus"]] = None, **kwargs: Any ) -> None: """ - :keyword status: Indicates whether the connection has been Approved/Rejected/Removed by the - owner of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", and - "Timeout". - :paramtype status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus - :keyword description: The reason for approval/rejection of the connection. - :paramtype description: str - :keyword actions_required: A message indicating if changes on the service provider require any - updates on the consumer. + :keyword actions_required: Some RP chose "None". Other RPs use this for region expansion. :paramtype actions_required: str + :keyword description: User-defined message that, per NRP doc, may be used for approval-related + message. + :paramtype description: str + :keyword status: Connection status of the service consumer with the service provider. Known + values are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". + :paramtype status: str or + ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ super().__init__(**kwargs) - self.status = status - self.description = description self.actions_required = actions_required + self.description = description + self.status = status class ProbeSettings(_serialization.Model): @@ -17255,13 +26163,52 @@ def __init__( self.timeout = timeout +class ProgressMetrics(_serialization.Model): + """Progress metrics definition. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar completed_datapoint_count: The completed datapoint count. + :vartype completed_datapoint_count: int + :ivar incremental_data_last_refresh_date_time: The time of last successful incremental data + refresh in UTC. + :vartype incremental_data_last_refresh_date_time: ~datetime.datetime + :ivar skipped_datapoint_count: The skipped datapoint count. + :vartype skipped_datapoint_count: int + :ivar total_datapoint_count: The total datapoint count. + :vartype total_datapoint_count: int + """ + + _validation = { + "completed_datapoint_count": {"readonly": True}, + "incremental_data_last_refresh_date_time": {"readonly": True}, + "skipped_datapoint_count": {"readonly": True}, + "total_datapoint_count": {"readonly": True}, + } + + _attribute_map = { + "completed_datapoint_count": {"key": "completedDatapointCount", "type": "int"}, + "incremental_data_last_refresh_date_time": {"key": "incrementalDataLastRefreshDateTime", "type": "iso-8601"}, + "skipped_datapoint_count": {"key": "skippedDatapointCount", "type": "int"}, + "total_datapoint_count": {"key": "totalDatapointCount", "type": "int"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.completed_datapoint_count = None + self.incremental_data_last_refresh_date_time = None + self.skipped_datapoint_count = None + self.total_datapoint_count = None + + class PyTorch(DistributionConfiguration): """PyTorch distribution configuration. All required parameters must be populated in order to send to Azure. :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. - Known values are: "PyTorch", "TensorFlow", and "Mpi". + Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray". :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType :ivar process_count_per_instance: Number of processes per node. :vartype process_count_per_instance: int @@ -17286,6 +26233,36 @@ def __init__(self, *, process_count_per_instance: Optional[int] = None, **kwargs self.process_count_per_instance = process_count_per_instance +class QueueSettings(_serialization.Model): + """QueueSettings. + + :ivar job_tier: Controls the compute job tier. Known values are: "Null", "Spot", "Basic", + "Standard", and "Premium". + :vartype job_tier: str or ~azure.mgmt.machinelearningservices.models.JobTier + :ivar priority: Controls the priority of the job on a compute. + :vartype priority: int + """ + + _attribute_map = { + "job_tier": {"key": "jobTier", "type": "str"}, + "priority": {"key": "priority", "type": "int"}, + } + + def __init__( + self, *, job_tier: Optional[Union[str, "_models.JobTier"]] = None, priority: Optional[int] = None, **kwargs: Any + ) -> None: + """ + :keyword job_tier: Controls the compute job tier. Known values are: "Null", "Spot", "Basic", + "Standard", and "Premium". + :paramtype job_tier: str or ~azure.mgmt.machinelearningservices.models.JobTier + :keyword priority: Controls the priority of the job on a compute. + :paramtype priority: int + """ + super().__init__(**kwargs) + self.job_tier = job_tier + self.priority = priority + + class QuotaBaseProperties(_serialization.Model): """The properties for Quota update or retrieval. @@ -17374,6 +26351,9 @@ class RandomSamplingAlgorithm(SamplingAlgorithm): "Bayesian". 
:vartype sampling_algorithm_type: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + :ivar logbase: An optional positive number or e in string format to be used as base for log + based random sampling. + :vartype logbase: str :ivar rule: The specific type of random algorithm. Known values are: "Random" and "Sobol". :vartype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule :ivar seed: An optional integer to use as the seed for random number generation. @@ -17386,6 +26366,7 @@ class RandomSamplingAlgorithm(SamplingAlgorithm): _attribute_map = { "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, + "logbase": {"key": "logbase", "type": "str"}, "rule": {"key": "rule", "type": "str"}, "seed": {"key": "seed", "type": "int"}, } @@ -17393,11 +26374,15 @@ class RandomSamplingAlgorithm(SamplingAlgorithm): def __init__( self, *, + logbase: Optional[str] = None, rule: Optional[Union[str, "_models.RandomSamplingAlgorithmRule"]] = None, seed: Optional[int] = None, **kwargs: Any ) -> None: """ + :keyword logbase: An optional positive number or e in string format to be used as base for log + based random sampling. + :paramtype logbase: str :keyword rule: The specific type of random algorithm. Known values are: "Random" and "Sobol". :paramtype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule :keyword seed: An optional integer to use as the seed for random number generation. @@ -17405,10 +26390,82 @@ def __init__( """ super().__init__(**kwargs) self.sampling_algorithm_type: str = "Random" + self.logbase = logbase self.rule = rule self.seed = seed +class Ray(DistributionConfiguration): + """Ray distribution configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. + Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray". + :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType + :ivar address: The address of Ray head node. + :vartype address: str + :ivar dashboard_port: The port to bind the dashboard server to. + :vartype dashboard_port: int + :ivar head_node_additional_args: Additional arguments passed to ray start in head node. + :vartype head_node_additional_args: str + :ivar include_dashboard: Provide this argument to start the Ray dashboard GUI. + :vartype include_dashboard: bool + :ivar port: The port of the head ray process. + :vartype port: int + :ivar worker_node_additional_args: Additional arguments passed to ray start in worker node. 
+ :vartype worker_node_additional_args: str + """ + + _validation = { + "distribution_type": {"required": True}, + } + + _attribute_map = { + "distribution_type": {"key": "distributionType", "type": "str"}, + "address": {"key": "address", "type": "str"}, + "dashboard_port": {"key": "dashboardPort", "type": "int"}, + "head_node_additional_args": {"key": "headNodeAdditionalArgs", "type": "str"}, + "include_dashboard": {"key": "includeDashboard", "type": "bool"}, + "port": {"key": "port", "type": "int"}, + "worker_node_additional_args": {"key": "workerNodeAdditionalArgs", "type": "str"}, + } + + def __init__( + self, + *, + address: Optional[str] = None, + dashboard_port: Optional[int] = None, + head_node_additional_args: Optional[str] = None, + include_dashboard: Optional[bool] = None, + port: Optional[int] = None, + worker_node_additional_args: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword address: The address of Ray head node. + :paramtype address: str + :keyword dashboard_port: The port to bind the dashboard server to. + :paramtype dashboard_port: int + :keyword head_node_additional_args: Additional arguments passed to ray start in head node. + :paramtype head_node_additional_args: str + :keyword include_dashboard: Provide this argument to start the Ray dashboard GUI. + :paramtype include_dashboard: bool + :keyword port: The port of the head ray process. + :paramtype port: int + :keyword worker_node_additional_args: Additional arguments passed to ray start in worker node. + :paramtype worker_node_additional_args: str + """ + super().__init__(**kwargs) + self.distribution_type: str = "Ray" + self.address = address + self.dashboard_port = dashboard_port + self.head_node_additional_args = head_node_additional_args + self.include_dashboard = include_dashboard + self.port = port + self.worker_node_additional_args = worker_node_additional_args + + class Recurrence(_serialization.Model): """The workflow trigger recurrence for ComputeStartStop schedule type. @@ -17788,12 +26845,12 @@ class RegistryListCredentialsResult(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar location: + :ivar location: The location of the workspace ACR. :vartype location: str - :ivar username: - :vartype username: str :ivar passwords: :vartype passwords: list[~azure.mgmt.machinelearningservices.models.Password] + :ivar username: The username of the workspace ACR. + :vartype username: str """ _validation = { @@ -17803,8 +26860,8 @@ class RegistryListCredentialsResult(_serialization.Model): _attribute_map = { "location": {"key": "location", "type": "str"}, - "username": {"key": "username", "type": "str"}, "passwords": {"key": "passwords", "type": "[Password]"}, + "username": {"key": "username", "type": "str"}, } def __init__(self, *, passwords: Optional[List["_models.Password"]] = None, **kwargs: Any) -> None: @@ -17814,8 +26871,8 @@ def __init__(self, *, passwords: Optional[List["_models.Password"]] = None, **kw """ super().__init__(**kwargs) self.location = None - self.username = None self.passwords = passwords + self.username = None class RegistryPartialManagedServiceIdentity(ManagedServiceIdentity): @@ -17960,7 +27017,7 @@ class RegistryPrivateLinkServiceConnectionState(_serialization.Model): message. :vartype description: str :ivar status: Connection status of the service consumer with the service provider. Known values - are: "Approved", "Pending", "Rejected", and "Disconnected". 
+ are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". :vartype status: str or ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ @@ -17986,7 +27043,7 @@ def __init__( message. :paramtype description: str :keyword status: Connection status of the service consumer with the service provider. Known - values are: "Approved", "Pending", "Rejected", and "Disconnected". + values are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ @@ -18090,11 +27147,19 @@ class Regression(TableVertical, AutoMLVertical): # pylint: disable=too-many-ins :ivar featurization_settings: Featurization inputs needed for AutoML job. :vartype featurization_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters :ivar limit_settings: Execution constraints for AutoMLJob. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset when validation dataset is not provided. :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings :ivar test_data: Test data input. 
:vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation @@ -18134,8 +27199,11 @@ class Regression(TableVertical, AutoMLVertical): # pylint: disable=too-many-ins "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, "test_data": {"key": "testData", "type": "MLTableJobInput"}, "test_data_size": {"key": "testDataSize", "type": "float"}, "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, @@ -18153,8 +27221,11 @@ def __init__( target_column_name: Optional[str] = None, cv_split_column_names: Optional[List[str]] = None, featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, test_data: Optional["_models.MLTableJobInput"] = None, test_data_size: Optional[float] = None, validation_data: Optional["_models.MLTableJobInput"] = None, @@ -18178,6 +27249,9 @@ def __init__( :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters :keyword limit_settings: Execution constraints for AutoMLJob. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings @@ -18185,6 +27259,12 @@ def __init__( dataset when validation dataset is not provided. :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings :keyword test_data: Test data input. 
:paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation @@ -18214,8 +27294,11 @@ def __init__( super().__init__( cv_split_column_names=cv_split_column_names, featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, limit_settings=limit_settings, n_cross_validations=n_cross_validations, + search_space=search_space, + sweep_settings=sweep_settings, test_data=test_data, test_data_size=test_data_size, validation_data=validation_data, @@ -18234,8 +27317,11 @@ def __init__( self.training_settings = training_settings self.cv_split_column_names = cv_split_column_names self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings self.n_cross_validations = n_cross_validations + self.search_space = search_space + self.sweep_settings = sweep_settings self.test_data = test_data self.test_data_size = test_data_size self.validation_data = validation_data @@ -18243,6 +27329,55 @@ def __init__( self.weight_column_name = weight_column_name +class RegressionModelPerformanceMetricThreshold(ModelPerformanceMetricThresholdBase): + """RegressionModelPerformanceMetricThreshold. + + All required parameters must be populated in order to send to Azure. + + :ivar model_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Classification" and "Regression". + :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType + :ivar threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :ivar metric: [Required] The regression model performance metric to calculate. Required. Known + values are: "MeanAbsoluteError", "RootMeanSquaredError", and "MeanSquaredError". + :vartype metric: str or + ~azure.mgmt.machinelearningservices.models.RegressionModelPerformanceMetric + """ + + _validation = { + "model_type": {"required": True}, + "metric": {"required": True}, + } + + _attribute_map = { + "model_type": {"key": "modelType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, + } + + def __init__( + self, + *, + metric: Union[str, "_models.RegressionModelPerformanceMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: + """ + :keyword threshold: The threshold value. If null, a default value will be set depending on the + selected metric. + :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold + :keyword metric: [Required] The regression model performance metric to calculate. Required. + Known values are: "MeanAbsoluteError", "RootMeanSquaredError", and "MeanSquaredError". + :paramtype metric: str or + ~azure.mgmt.machinelearningservices.models.RegressionModelPerformanceMetric + """ + super().__init__(threshold=threshold, **kwargs) + self.model_type: str = "Regression" + self.metric = metric + + class RegressionTrainingSettings(TrainingSettings): """Regression Training related configuration. @@ -18263,6 +27398,14 @@ class RegressionTrainingSettings(TrainingSettings): :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. 
:vartype stack_ensemble_settings: ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to + 'non-distributed' for now, however in the future may result in mixed mode or heuristics based + mode selection. Default is 'auto'. + If 'Distributed' then only distributed featurization is used and distributed algorithms are + chosen. + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". + :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :ivar allowed_training_algorithms: Allowed models for regression task. :vartype allowed_training_algorithms: list[str or ~azure.mgmt.machinelearningservices.models.RegressionModels] @@ -18279,6 +27422,7 @@ class RegressionTrainingSettings(TrainingSettings): "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, } @@ -18293,6 +27437,7 @@ def __init__( enable_vote_ensemble: bool = True, ensemble_model_download_timeout: datetime.timedelta = "PT5M", stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + training_mode: Optional[Union[str, "_models.TrainingMode"]] = None, allowed_training_algorithms: Optional[List[Union[str, "_models.RegressionModels"]]] = None, blocked_training_algorithms: Optional[List[Union[str, "_models.RegressionModels"]]] = None, **kwargs: Any @@ -18315,6 +27460,14 @@ def __init__( :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. :paramtype stack_ensemble_settings: ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to + 'non-distributed' for now, however in the future may result in mixed mode or heuristics based + mode selection. Default is 'auto'. + If 'Distributed' then only distributed featurization is used and distributed algorithms are + chosen. + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". + :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :keyword allowed_training_algorithms: Allowed models for regression task. :paramtype allowed_training_algorithms: list[str or ~azure.mgmt.machinelearningservices.models.RegressionModels] @@ -18330,12 +27483,93 @@ def __init__( enable_vote_ensemble=enable_vote_ensemble, ensemble_model_download_timeout=ensemble_model_download_timeout, stack_ensemble_settings=stack_ensemble_settings, + training_mode=training_mode, **kwargs ) self.allowed_training_algorithms = allowed_training_algorithms self.blocked_training_algorithms = blocked_training_algorithms +class RequestConfiguration(_serialization.Model): + """Scoring requests configuration. + + :ivar max_concurrent_requests_per_instance: The number of maximum concurrent requests per node + allowed per deployment. Defaults to 1. + :vartype max_concurrent_requests_per_instance: int + :ivar request_timeout: The scoring timeout in ISO 8601 format. 
+ Defaults to 5000ms. + :vartype request_timeout: ~datetime.timedelta + """ + + _attribute_map = { + "max_concurrent_requests_per_instance": {"key": "maxConcurrentRequestsPerInstance", "type": "int"}, + "request_timeout": {"key": "requestTimeout", "type": "duration"}, + } + + def __init__( + self, + *, + max_concurrent_requests_per_instance: int = 1, + request_timeout: datetime.timedelta = "PT5S", + **kwargs: Any + ) -> None: + """ + :keyword max_concurrent_requests_per_instance: The number of maximum concurrent requests per + node allowed per deployment. Defaults to 1. + :paramtype max_concurrent_requests_per_instance: int + :keyword request_timeout: The scoring timeout in ISO 8601 format. + Defaults to 5000ms. + :paramtype request_timeout: ~datetime.timedelta + """ + super().__init__(**kwargs) + self.max_concurrent_requests_per_instance = max_concurrent_requests_per_instance + self.request_timeout = request_timeout + + +class RequestLogging(_serialization.Model): + """RequestLogging. + + :ivar capture_headers: For payload logging, we only collect payload by default. If customers + also want to collect the specified headers, they can set them in captureHeaders so that backend + will collect those headers along with payload. + :vartype capture_headers: list[str] + """ + + _attribute_map = { + "capture_headers": {"key": "captureHeaders", "type": "[str]"}, + } + + def __init__(self, *, capture_headers: Optional[List[str]] = None, **kwargs: Any) -> None: + """ + :keyword capture_headers: For payload logging, we only collect payload by default. If customers + also want to collect the specified headers, they can set them in captureHeaders so that backend + will collect those headers along with payload. + :paramtype capture_headers: list[str] + """ + super().__init__(**kwargs) + self.capture_headers = capture_headers + + +class ResizeSchema(_serialization.Model): + """Schema for Compute Instance resize. + + :ivar target_vm_size: The name of the virtual machine size. + :vartype target_vm_size: str + """ + + _attribute_map = { + "target_vm_size": {"key": "targetVMSize", "type": "str"}, + } + + def __init__(self, *, target_vm_size: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword target_vm_size: The name of the virtual machine size. + :paramtype target_vm_size: str + """ + super().__init__(**kwargs) + self.target_vm_size = target_vm_size + + class ResourceId(_serialization.Model): """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. @@ -18477,17 +27711,20 @@ class SASAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): All required parameters must be populated in order to send to Azure. :ivar auth_type: Authentication type of the connection target. Required. Known values are: - "PAT", "ManagedIdentity", "UsernamePassword", "None", and "SAS". + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType :ivar category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". 
:vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. + :vartype metadata: JSON :ivar target: :vartype target: str - :ivar value: Value details of the workspace connection. - :vartype value: str - :ivar value_format: format for the workspace connection value. "JSON" - :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat :ivar credentials: :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature @@ -18500,9 +27737,9 @@ class SASAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): _attribute_map = { "auth_type": {"key": "authType", "type": "str"}, "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, "target": {"key": "target", "type": "str"}, - "value": {"key": "value", "type": "str"}, - "value_format": {"key": "valueFormat", "type": "str"}, "credentials": {"key": "credentials", "type": "WorkspaceConnectionSharedAccessSignature"}, } @@ -18510,27 +27747,29 @@ def __init__( self, *, category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - value: Optional[str] = None, - value_format: Optional[Union[str, "_models.ValueFormat"]] = None, credentials: Optional["_models.WorkspaceConnectionSharedAccessSignature"] = None, **kwargs: Any ) -> None: """ :keyword category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. + :paramtype metadata: JSON :keyword target: :paramtype target: str - :keyword value: Value details of the workspace connection. - :paramtype value: str - :keyword value_format: format for the workspace connection value. "JSON" - :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat :keyword credentials: :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature """ - super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + super().__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) self.auth_type: str = "SAS" self.credentials = credentials @@ -18573,7 +27812,8 @@ class SasDatastoreCredentials(DatastoreCredentials): All required parameters must be populated in order to send to Azure. :ivar credentials_type: [Required] Credential type used to authentication with storage. - Required. Known values are: "AccountKey", "Certificate", "None", "Sas", and "ServicePrincipal". + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType :ivar secrets: [Required] Storage container secrets. Required. 
:vartype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets @@ -18605,7 +27845,8 @@ class SasDatastoreSecrets(DatastoreSecrets): All required parameters must be populated in order to send to Azure. :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. - Known values are: "AccountKey", "Certificate", "Sas", and "ServicePrincipal". + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType :ivar sas_token: Storage container SAS token. :vartype sas_token: str @@ -18901,7 +28142,7 @@ def __init__( class ScriptReference(_serialization.Model): """Script reference. - :ivar script_source: The storage source of the script: workspace. + :ivar script_source: The storage source of the script: inline, workspace. :vartype script_source: str :ivar script_data: The location of scripts in the mounted volume. :vartype script_data: str @@ -18928,7 +28169,7 @@ def __init__( **kwargs: Any ) -> None: """ - :keyword script_source: The storage source of the script: workspace. + :keyword script_source: The storage source of the script: inline, workspace. :paramtype script_source: str :keyword script_data: The location of scripts in the mounted volume. :paramtype script_data: str @@ -18976,10 +28217,205 @@ def __init__( self.creation_script = creation_script +class SecretConfiguration(_serialization.Model): + """Secret Configuration definition. + + :ivar uri: Secret Uri. + Sample Uri : https://myvault.vault.azure.net/secrets/mysecretname/secretversion. + :vartype uri: str + :ivar workspace_secret_name: Name of secret in workspace key vault. + :vartype workspace_secret_name: str + """ + + _attribute_map = { + "uri": {"key": "uri", "type": "str"}, + "workspace_secret_name": {"key": "workspaceSecretName", "type": "str"}, + } + + def __init__( + self, *, uri: Optional[str] = None, workspace_secret_name: Optional[str] = None, **kwargs: Any + ) -> None: + """ + :keyword uri: Secret Uri. + Sample Uri : https://myvault.vault.azure.net/secrets/mysecretname/secretversion. + :paramtype uri: str + :keyword workspace_secret_name: Name of secret in workspace key vault. + :paramtype workspace_secret_name: str + """ + super().__init__(**kwargs) + self.uri = uri + self.workspace_secret_name = workspace_secret_name + + +class ServerlessEndpoint(TrackedResource): + """ServerlessEndpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). 
+ :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: [Required] Additional attributes of the entity. Required. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ServerlessEndpointProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "ServerlessEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.ServerlessEndpointProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: [Required] Additional attributes of the entity. Required. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ServerlessEndpointProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class ServerlessEndpointProperties(_serialization.Model): + """ServerlessEndpointProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar inference_uri: The inference uri to target when making requests against the serverless + endpoint. + :vartype inference_uri: str + :ivar model_profile: [Required] The model profile to configure the serverless endpoint with. + Required. + :vartype model_profile: ~azure.mgmt.machinelearningservices.models.ModelProfile + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". 
+ :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState + """ + + _validation = { + "inference_uri": {"readonly": True}, + "model_profile": {"required": True}, + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "inference_uri": {"key": "inferenceUri", "type": "str"}, + "model_profile": {"key": "modelProfile", "type": "ModelProfile"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__(self, *, model_profile: "_models.ModelProfile", **kwargs: Any) -> None: + """ + :keyword model_profile: [Required] The model profile to configure the serverless endpoint with. + Required. + :paramtype model_profile: ~azure.mgmt.machinelearningservices.models.ModelProfile + """ + super().__init__(**kwargs) + self.inference_uri = None + self.model_profile = model_profile + self.provisioning_state = None + + +class ServerlessEndpointTrackedResourceArmPaginatedResult(_serialization.Model): + """A paginated list of ServerlessEndpoint entities. + + :ivar next_link: The link to the next page of ServerlessEndpoint objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type ServerlessEndpoint. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + """ + + _attribute_map = { + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ServerlessEndpoint]"}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.ServerlessEndpoint"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword next_link: The link to the next page of ServerlessEndpoint objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ServerlessEndpoint. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + """ + super().__init__(**kwargs) + self.next_link = next_link + self.value = value + + class ServiceManagedResourcesSettings(_serialization.Model): """ServiceManagedResourcesSettings. - :ivar cosmos_db: The settings for the service managed cosmosdb account. + :ivar cosmos_db: :vartype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings """ @@ -18989,20 +28425,90 @@ class ServiceManagedResourcesSettings(_serialization.Model): def __init__(self, *, cosmos_db: Optional["_models.CosmosDbSettings"] = None, **kwargs: Any) -> None: """ - :keyword cosmos_db: The settings for the service managed cosmosdb account. + :keyword cosmos_db: :paramtype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings """ super().__init__(**kwargs) self.cosmos_db = cosmos_db +class ServicePrincipalAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """ServicePrincipalAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. 
Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. + :vartype metadata: JSON + :ivar target: + :vartype target: str + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionServicePrincipal + """ + + _validation = { + "auth_type": {"required": True}, + } + + _attribute_map = { + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionServicePrincipal"}, + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, + target: Optional[str] = None, + credentials: Optional["_models.WorkspaceConnectionServicePrincipal"] = None, + **kwargs: Any + ) -> None: + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. + :paramtype metadata: JSON + :keyword target: + :paramtype target: str + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionServicePrincipal + """ + super().__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) + self.auth_type: str = "ServicePrincipal" + self.credentials = credentials + + class ServicePrincipalDatastoreCredentials(DatastoreCredentials): """Service Principal datastore credentials configuration. All required parameters must be populated in order to send to Azure. :ivar credentials_type: [Required] Credential type used to authentication with storage. - Required. Known values are: "AccountKey", "Certificate", "None", "Sas", and "ServicePrincipal". + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType :ivar authority_url: Authority URL used for authentication. :vartype authority_url: str @@ -19064,35 +28570,147 @@ def __init__( self.tenant_id = tenant_id -class ServicePrincipalDatastoreSecrets(DatastoreSecrets): - """Datastore Service Principal secrets. +class ServicePrincipalDatastoreSecrets(DatastoreSecrets): + """Datastore Service Principal secrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. 
+ Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar client_secret: Service principal secret. + :vartype client_secret: str + """ + + _validation = { + "secrets_type": {"required": True}, + } + + _attribute_map = { + "secrets_type": {"key": "secretsType", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, + } + + def __init__(self, *, client_secret: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword client_secret: Service principal secret. + :paramtype client_secret: str + """ + super().__init__(**kwargs) + self.secrets_type: str = "ServicePrincipal" + self.client_secret = client_secret + + +class ServiceTagDestination(_serialization.Model): + """Service Tag destination for a Service Tag Outbound Rule for the managed network of a machine + learning workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar action: The action enum for networking rule. Known values are: "Allow" and "Deny". + :vartype action: str or ~azure.mgmt.machinelearningservices.models.RuleAction + :ivar address_prefixes: Optional, if provided, the ServiceTag property will be ignored. + :vartype address_prefixes: list[str] + :ivar port_ranges: + :vartype port_ranges: str + :ivar protocol: + :vartype protocol: str + :ivar service_tag: + :vartype service_tag: str + """ + + _validation = { + "address_prefixes": {"readonly": True}, + } + + _attribute_map = { + "action": {"key": "action", "type": "str"}, + "address_prefixes": {"key": "addressPrefixes", "type": "[str]"}, + "port_ranges": {"key": "portRanges", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_tag": {"key": "serviceTag", "type": "str"}, + } + + def __init__( + self, + *, + action: Optional[Union[str, "_models.RuleAction"]] = None, + port_ranges: Optional[str] = None, + protocol: Optional[str] = None, + service_tag: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword action: The action enum for networking rule. Known values are: "Allow" and "Deny". + :paramtype action: str or ~azure.mgmt.machinelearningservices.models.RuleAction + :keyword port_ranges: + :paramtype port_ranges: str + :keyword protocol: + :paramtype protocol: str + :keyword service_tag: + :paramtype service_tag: str + """ + super().__init__(**kwargs) + self.action = action + self.address_prefixes = None + self.port_ranges = port_ranges + self.protocol = protocol + self.service_tag = service_tag + + +class ServiceTagOutboundRule(OutboundRule): + """Service Tag Outbound Rule for the managed network of a machine learning workspace. All required parameters must be populated in order to send to Azure. - :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. - Known values are: "AccountKey", "Certificate", "Sas", and "ServicePrincipal". - :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType - :ivar client_secret: Service principal secret. - :vartype client_secret: str + :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory + :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Known + values are: "Inactive" and "Active". 
+ :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus + :ivar type: Type of a managed network Outbound Rule of a machine learning workspace. Required. + Known values are: "FQDN", "PrivateEndpoint", and "ServiceTag". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType + :ivar destination: Service Tag destination for a Service Tag Outbound Rule for the managed + network of a machine learning workspace. + :vartype destination: ~azure.mgmt.machinelearningservices.models.ServiceTagDestination """ _validation = { - "secrets_type": {"required": True}, + "type": {"required": True}, } _attribute_map = { - "secrets_type": {"key": "secretsType", "type": "str"}, - "client_secret": {"key": "clientSecret", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "destination": {"key": "destination", "type": "ServiceTagDestination"}, } - def __init__(self, *, client_secret: Optional[str] = None, **kwargs: Any) -> None: + def __init__( + self, + *, + category: Optional[Union[str, "_models.RuleCategory"]] = None, + status: Optional[Union[str, "_models.RuleStatus"]] = None, + destination: Optional["_models.ServiceTagDestination"] = None, + **kwargs: Any + ) -> None: """ - :keyword client_secret: Service principal secret. - :paramtype client_secret: str + :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory + :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. Known + values are: "Inactive" and "Active". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus + :keyword destination: Service Tag destination for a Service Tag Outbound Rule for the managed + network of a machine learning workspace. + :paramtype destination: ~azure.mgmt.machinelearningservices.models.ServiceTagDestination """ - super().__init__(**kwargs) - self.secrets_type: str = "ServicePrincipal" - self.client_secret = client_secret + super().__init__(category=category, status=status, **kwargs) + self.type: str = "ServiceTag" + self.destination = destination class SetupScripts(_serialization.Model): @@ -19120,23 +28738,22 @@ class SharedPrivateLinkResource(_serialization.Model): :ivar name: Unique name of the private link. :vartype name: str - :ivar private_link_resource_id: The resource id that private link links to. - :vartype private_link_resource_id: str - :ivar group_id: The private link resource group id. + :ivar group_id: group id of the private link. :vartype group_id: str + :ivar private_link_resource_id: the resource id that private link links to. + :vartype private_link_resource_id: str :ivar request_message: Request message. :vartype request_message: str - :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", and - "Timeout". + :ivar status: Connection status of the service consumer with the service provider. Known values + are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". 
:vartype status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ _attribute_map = { "name": {"key": "name", "type": "str"}, - "private_link_resource_id": {"key": "properties.privateLinkResourceId", "type": "str"}, "group_id": {"key": "properties.groupId", "type": "str"}, + "private_link_resource_id": {"key": "properties.privateLinkResourceId", "type": "str"}, "request_message": {"key": "properties.requestMessage", "type": "str"}, "status": {"key": "properties.status", "type": "str"}, } @@ -19145,31 +28762,30 @@ def __init__( self, *, name: Optional[str] = None, - private_link_resource_id: Optional[str] = None, group_id: Optional[str] = None, + private_link_resource_id: Optional[str] = None, request_message: Optional[str] = None, - status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, + status: Optional[Union[str, "_models.EndpointServiceConnectionStatus"]] = None, **kwargs: Any ) -> None: """ :keyword name: Unique name of the private link. :paramtype name: str - :keyword private_link_resource_id: The resource id that private link links to. - :paramtype private_link_resource_id: str - :keyword group_id: The private link resource group id. + :keyword group_id: group id of the private link. :paramtype group_id: str + :keyword private_link_resource_id: the resource id that private link links to. + :paramtype private_link_resource_id: str :keyword request_message: Request message. :paramtype request_message: str - :keyword status: Indicates whether the connection has been Approved/Rejected/Removed by the - owner of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", and - "Timeout". + :keyword status: Connection status of the service consumer with the service provider. Known + values are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". :paramtype status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ super().__init__(**kwargs) self.name = name - self.private_link_resource_id = private_link_resource_id self.group_id = group_id + self.private_link_resource_id = private_link_resource_id self.request_message = request_message self.status = status @@ -19397,8 +29013,361 @@ def __init__(self, *, name: str, tier: Optional[Union[str, "_models.SkuTier"]] = :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier """ super().__init__(**kwargs) - self.name = name - self.tier = tier + self.name = name + self.tier = tier + + +class SparkJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes + """Spark job definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. 
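Given the reordered keywords and the switch to EndpointServiceConnectionStatus above, a shared private link resource might now be constructed as sketched below; the name and resource ID are placeholders, and the status string is one of the known values listed in the docstring.

from azure.mgmt.machinelearningservices import models

# Illustrative shared private link; the resource ID is a placeholder.
shared_link = models.SharedPrivateLinkResource(
    name="example-private-link",
    group_id="blob",
    private_link_resource_id=(
        "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/"
        "Microsoft.Storage/storageAccounts/<account>"
    ),
    request_message="Please approve",
    status="Pending",                    # EndpointServiceConnectionStatus known value
)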
+ :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar notification_setting: Notification setting for the job. + :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :ivar secrets_configuration: Configuration for secrets to be made available during runtime. + :vartype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar archives: Archive files used in the job. + :vartype archives: list[str] + :ivar args: Arguments for the job. + :vartype args: str + :ivar code_id: [Required] ARM resource ID of the code asset. Required. + :vartype code_id: str + :ivar conf: Spark configured properties. + :vartype conf: dict[str, str] + :ivar entry: [Required] The entry to execute on startup of the job. Required. + :vartype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry + :ivar environment_id: The ARM resource ID of the Environment specification for the job. + :vartype environment_id: str + :ivar files: Files used in the job. + :vartype files: list[str] + :ivar inputs: Mapping of input data bindings used in the job. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar jars: Jar files used in the job. + :vartype jars: list[str] + :ivar outputs: Mapping of output data bindings used in the job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar py_files: Python files used in the job. + :vartype py_files: list[str] + :ivar queue_settings: Queue settings for the job. + :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings + :ivar resources: Compute Resource configuration for the job. 
+ :vartype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration + """ + + _validation = { + "job_type": {"required": True}, + "status": {"readonly": True}, + "code_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "entry": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "archives": {"key": "archives", "type": "[str]"}, + "args": {"key": "args", "type": "str"}, + "code_id": {"key": "codeId", "type": "str"}, + "conf": {"key": "conf", "type": "{str}"}, + "entry": {"key": "entry", "type": "SparkJobEntry"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "files": {"key": "files", "type": "[str]"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "jars": {"key": "jars", "type": "[str]"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "py_files": {"key": "pyFiles", "type": "[str]"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": "SparkResourceConfiguration"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + code_id: str, + entry: "_models.SparkJobEntry", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, + services: Optional[Dict[str, "_models.JobService"]] = None, + archives: Optional[List[str]] = None, + args: Optional[str] = None, + conf: Optional[Dict[str, str]] = None, + environment_id: Optional[str] = None, + files: Optional[List[str]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + jars: Optional[List[str]] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + py_files: Optional[List[str]] = None, + queue_settings: Optional["_models.QueueSettings"] = None, + resources: Optional["_models.SparkResourceConfiguration"] = None, + **kwargs: Any + ) -> None: + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. 
+ :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword notification_setting: Notification setting for the job. + :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :keyword secrets_configuration: Configuration for secrets to be made available during runtime. + :paramtype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword archives: Archive files used in the job. + :paramtype archives: list[str] + :keyword args: Arguments for the job. + :paramtype args: str + :keyword code_id: [Required] ARM resource ID of the code asset. Required. + :paramtype code_id: str + :keyword conf: Spark configured properties. + :paramtype conf: dict[str, str] + :keyword entry: [Required] The entry to execute on startup of the job. Required. + :paramtype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry + :keyword environment_id: The ARM resource ID of the Environment specification for the job. + :paramtype environment_id: str + :keyword files: Files used in the job. + :paramtype files: list[str] + :keyword inputs: Mapping of input data bindings used in the job. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword jars: Jar files used in the job. + :paramtype jars: list[str] + :keyword outputs: Mapping of output data bindings used in the job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword py_files: Python files used in the job. + :paramtype py_files: list[str] + :keyword queue_settings: Queue settings for the job. + :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings + :keyword resources: Compute Resource configuration for the job. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration + """ + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, + services=services, + **kwargs + ) + self.job_type: str = "Spark" + self.archives = archives + self.args = args + self.code_id = code_id + self.conf = conf + self.entry = entry + self.environment_id = environment_id + self.files = files + self.inputs = inputs + self.jars = jars + self.outputs = outputs + self.py_files = py_files + self.queue_settings = queue_settings + self.resources = resources + + +class SparkJobEntry(_serialization.Model): + """Spark job entry point definition. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + SparkJobPythonEntry, SparkJobScalaEntry + + All required parameters must be populated in order to send to Azure. + + :ivar spark_job_entry_type: [Required] Type of the job's entry point. Required. Known values + are: "SparkJobPythonEntry" and "SparkJobScalaEntry". + :vartype spark_job_entry_type: str or + ~azure.mgmt.machinelearningservices.models.SparkJobEntryType + """ + + _validation = { + "spark_job_entry_type": {"required": True}, + } + + _attribute_map = { + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, + } + + _subtype_map = { + "spark_job_entry_type": { + "SparkJobPythonEntry": "SparkJobPythonEntry", + "SparkJobScalaEntry": "SparkJobScalaEntry", + } + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.spark_job_entry_type: Optional[str] = None + + +class SparkJobPythonEntry(SparkJobEntry): + """SparkJobPythonEntry. + + All required parameters must be populated in order to send to Azure. + + :ivar spark_job_entry_type: [Required] Type of the job's entry point. Required. Known values + are: "SparkJobPythonEntry" and "SparkJobScalaEntry". + :vartype spark_job_entry_type: str or + ~azure.mgmt.machinelearningservices.models.SparkJobEntryType + :ivar file: [Required] Relative python file path for job entry point. Required. + :vartype file: str + """ + + _validation = { + "spark_job_entry_type": {"required": True}, + "file": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, + "file": {"key": "file", "type": "str"}, + } + + def __init__(self, *, file: str, **kwargs: Any) -> None: + """ + :keyword file: [Required] Relative python file path for job entry point. Required. + :paramtype file: str + """ + super().__init__(**kwargs) + self.spark_job_entry_type: str = "SparkJobPythonEntry" + self.file = file + + +class SparkJobScalaEntry(SparkJobEntry): + """SparkJobScalaEntry. + + All required parameters must be populated in order to send to Azure. + + :ivar spark_job_entry_type: [Required] Type of the job's entry point. Required. Known values + are: "SparkJobPythonEntry" and "SparkJobScalaEntry". + :vartype spark_job_entry_type: str or + ~azure.mgmt.machinelearningservices.models.SparkJobEntryType + :ivar class_name: [Required] Scala class name used as entry point. Required. + :vartype class_name: str + """ + + _validation = { + "spark_job_entry_type": {"required": True}, + "class_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + } + + _attribute_map = { + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, + "class_name": {"key": "className", "type": "str"}, + } + + def __init__(self, *, class_name: str, **kwargs: Any) -> None: + """ + :keyword class_name: [Required] Scala class name used as entry point. Required. + :paramtype class_name: str + """ + super().__init__(**kwargs) + self.spark_job_entry_type: str = "SparkJobScalaEntry" + self.class_name = class_name + + +class SparkResourceConfiguration(_serialization.Model): + """SparkResourceConfiguration. + + :ivar instance_type: Optional type of VM used as supported by the compute target. + :vartype instance_type: str + :ivar runtime_version: Version of spark runtime used for the job. 
+ :vartype runtime_version: str + """ + + _attribute_map = { + "instance_type": {"key": "instanceType", "type": "str"}, + "runtime_version": {"key": "runtimeVersion", "type": "str"}, + } + + def __init__(self, *, instance_type: Optional[str] = None, runtime_version: str = "3.1", **kwargs: Any) -> None: + """ + :keyword instance_type: Optional type of VM used as supported by the compute target. + :paramtype instance_type: str + :keyword runtime_version: Version of spark runtime used for the job. + :paramtype runtime_version: str + """ + super().__init__(**kwargs) + self.instance_type = instance_type + self.runtime_version = runtime_version class SslConfiguration(_serialization.Model): @@ -19516,6 +29485,130 @@ def __init__( self.stack_meta_learner_type = stack_meta_learner_type +class StaticInputData(MonitoringInputDataBase): + """Static input data definition. + + All required parameters must be populated in order to send to Azure. + + :ivar columns: Mapping of column names to special uses. + :vartype columns: dict[str, str] + :ivar data_context: The context metadata of the data source. + :vartype data_context: str + :ivar input_data_type: [Required] Specifies the type of signal to monitor. Required. Known + values are: "Static", "Trailing", and "Fixed". + :vartype input_data_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str + :ivar preprocessing_component_id: The ARM resource ID of the component resource used to + preprocess the data. + :vartype preprocessing_component_id: str + :ivar window_end: [Required] The end date of the data window. Required. + :vartype window_end: ~datetime.datetime + :ivar window_start: [Required] The start date of the data window. Required. + :vartype window_start: ~datetime.datetime + """ + + _validation = { + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "window_end": {"required": True}, + "window_start": {"required": True}, + } + + _attribute_map = { + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "preprocessing_component_id": {"key": "preprocessingComponentId", "type": "str"}, + "window_end": {"key": "windowEnd", "type": "iso-8601"}, + "window_start": {"key": "windowStart", "type": "iso-8601"}, + } + + def __init__( + self, + *, + job_input_type: Union[str, "_models.JobInputType"], + uri: str, + window_end: datetime.datetime, + window_start: datetime.datetime, + columns: Optional[Dict[str, str]] = None, + data_context: Optional[str] = None, + preprocessing_component_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword columns: Mapping of column names to special uses. + :paramtype columns: dict[str, str] + :keyword data_context: The context metadata of the data source. + :paramtype data_context: str + :keyword job_input_type: [Required] Specifies the type of job. Required. 
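Putting the Spark-related models above together, a standalone Spark job payload could be assembled roughly as follows. This is a sketch, not a value from this diff: the code asset ID, environment ID, entry file, VM size, and Spark configuration keys are all placeholders.

from azure.mgmt.machinelearningservices import models

# Illustrative Spark job definition; every ID and file name below is a placeholder.
spark_job = models.SparkJob(
    code_id="/subscriptions/<sub>/.../codes/my_code/versions/1",
    entry=models.SparkJobPythonEntry(file="entry.py"),
    environment_id="/subscriptions/<sub>/.../environments/my_env/versions/1",
    conf={
        "spark.driver.cores": "1",
        "spark.driver.memory": "2g",
        "spark.executor.cores": "2",
        "spark.executor.memory": "2g",
        "spark.executor.instances": "2",
    },
    resources=models.SparkResourceConfiguration(
        instance_type="Standard_E4S_V3",  # placeholder VM size
        runtime_version="3.3",
    ),
    experiment_name="spark-demo",
)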
Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + :keyword preprocessing_component_id: The ARM resource ID of the component resource used to + preprocess the data. + :paramtype preprocessing_component_id: str + :keyword window_end: [Required] The end date of the data window. Required. + :paramtype window_end: ~datetime.datetime + :keyword window_start: [Required] The start date of the data window. Required. + :paramtype window_start: ~datetime.datetime + """ + super().__init__(columns=columns, data_context=data_context, job_input_type=job_input_type, uri=uri, **kwargs) + self.input_data_type: str = "Static" + self.preprocessing_component_id = preprocessing_component_id + self.window_end = window_end + self.window_start = window_start + + +class StatusMessage(_serialization.Model): + """Active message associated with project. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Service-defined message code. + :vartype code: str + :ivar created_date_time: Time in UTC at which the message was created. + :vartype created_date_time: ~datetime.datetime + :ivar level: Severity level of message. Known values are: "Error", "Information", and + "Warning". + :vartype level: str or ~azure.mgmt.machinelearningservices.models.StatusMessageLevel + :ivar message: A human-readable representation of the message code. + :vartype message: str + """ + + _validation = { + "code": {"readonly": True}, + "created_date_time": {"readonly": True}, + "level": {"readonly": True}, + "message": {"readonly": True}, + } + + _attribute_map = { + "code": {"key": "code", "type": "str"}, + "created_date_time": {"key": "createdDateTime", "type": "iso-8601"}, + "level": {"key": "level", "type": "str"}, + "message": {"key": "message", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.code = None + self.created_date_time = None + self.level = None + self.message = None + + class StorageAccountDetails(_serialization.Model): """Details of storage account to be used for the Registry. @@ -19585,15 +29678,23 @@ class SweepJob(JobBaseProperties): # pylint: disable=too-many-instance-attribut :ivar is_archived: Is the asset archived?. :vartype is_archived: bool :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", - "Command", "Sweep", and "Pipeline". + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar notification_setting: Notification setting for the job. + :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :ivar secrets_configuration: Configuration for secrets to be made available during runtime. + :vartype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] :ivar status: Status of the job. 
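A hedged sketch of a static monitoring input built from the StaticInputData model above; the URI, data context, and window dates are made-up values, and the job input type is one of the known values listed in the docstring.

import datetime

from azure.mgmt.machinelearningservices import models

# Illustrative static input covering a fixed data window.
static_input = models.StaticInputData(
    job_input_type="mltable",
    uri="azureml://datastores/workspaceblobstore/paths/monitoring/production-data",
    window_start=datetime.datetime(2023, 7, 1, tzinfo=datetime.timezone.utc),
    window_end=datetime.datetime(2023, 8, 1, tzinfo=datetime.timezone.utc),
    data_context="ModelInputs",          # placeholder context label
)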
Known values are: "NotStarted", "Starting", "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", - "Canceled", "NotResponding", "Paused", and "Unknown". + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar component_configuration: Component Configuration for sweep over component. + :vartype component_configuration: + ~azure.mgmt.machinelearningservices.models.ComponentConfiguration :ivar early_termination: Early termination policies enable canceling poor-performing runs before they complete. :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy @@ -19605,6 +29706,10 @@ class SweepJob(JobBaseProperties): # pylint: disable=too-many-instance-attribut :vartype objective: ~azure.mgmt.machinelearningservices.models.Objective :ivar outputs: Mapping of output data bindings used in the job. :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar queue_settings: Queue settings for the job. + :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings + :ivar resources: Compute Resource configuration for the job. + :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration :ivar sampling_algorithm: [Required] The hyperparameter sampling algorithm. Required. :vartype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm :ivar search_space: [Required] A dictionary containing each parameter and its distribution. The @@ -19634,19 +29739,24 @@ class SweepJob(JobBaseProperties): # pylint: disable=too-many-instance-attribut "identity": {"key": "identity", "type": "IdentityConfiguration"}, "is_archived": {"key": "isArchived", "type": "bool"}, "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, "services": {"key": "services", "type": "{JobService}"}, "status": {"key": "status", "type": "str"}, + "component_configuration": {"key": "componentConfiguration", "type": "ComponentConfiguration"}, "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, "inputs": {"key": "inputs", "type": "{JobInput}"}, "limits": {"key": "limits", "type": "SweepJobLimits"}, "objective": {"key": "objective", "type": "Objective"}, "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, "sampling_algorithm": {"key": "samplingAlgorithm", "type": "SamplingAlgorithm"}, "search_space": {"key": "searchSpace", "type": "object"}, "trial": {"key": "trial", "type": "TrialComponent"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, objective: "_models.Objective", @@ -19662,11 +29772,16 @@ def __init__( experiment_name: str = "Default", identity: Optional["_models.IdentityConfiguration"] = None, is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, services: Optional[Dict[str, "_models.JobService"]] = None, + component_configuration: Optional["_models.ComponentConfiguration"] = None, early_termination: Optional["_models.EarlyTerminationPolicy"] = None, 
inputs: Optional[Dict[str, "_models.JobInput"]] = None, limits: Optional["_models.SweepJobLimits"] = None, outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + queue_settings: Optional["_models.QueueSettings"] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, **kwargs: Any ) -> None: """ @@ -19691,9 +29806,17 @@ def __init__( :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool + :keyword notification_setting: Notification setting for the job. + :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting + :keyword secrets_configuration: Configuration for secrets to be made available during runtime. + :paramtype secrets_configuration: dict[str, + ~azure.mgmt.machinelearningservices.models.SecretConfiguration] :keyword services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword component_configuration: Component Configuration for sweep over component. + :paramtype component_configuration: + ~azure.mgmt.machinelearningservices.models.ComponentConfiguration :keyword early_termination: Early termination policies enable canceling poor-performing runs before they complete. :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy @@ -19705,6 +29828,10 @@ def __init__( :paramtype objective: ~azure.mgmt.machinelearningservices.models.Objective :keyword outputs: Mapping of output data bindings used in the job. :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword queue_settings: Queue settings for the job. + :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings + :keyword resources: Compute Resource configuration for the job. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration :keyword sampling_algorithm: [Required] The hyperparameter sampling algorithm. Required. :paramtype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm :keyword search_space: [Required] A dictionary containing each parameter and its distribution. @@ -19723,15 +29850,20 @@ def __init__( experiment_name=experiment_name, identity=identity, is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, services=services, **kwargs ) self.job_type: str = "Sweep" + self.component_configuration = component_configuration self.early_termination = early_termination self.inputs = inputs self.limits = limits self.objective = objective self.outputs = outputs + self.queue_settings = queue_settings + self.resources = resources self.sampling_algorithm = sampling_algorithm self.search_space = search_space self.trial = trial @@ -20186,6 +30318,372 @@ def __init__(self, **kwargs: Any) -> None: self.version = None +class TableFixedParameters(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Fixed training parameters that won't be swept over during AutoML Table training. + + :ivar booster: Specify the boosting type, e.g gbdt for XGBoost. + :vartype booster: str + :ivar boosting_type: Specify the boosting type, e.g gbdt for LightGBM. + :vartype boosting_type: str + :ivar grow_policy: Specify the grow policy, which controls the way new nodes are added to the + tree. 
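To illustrate how the newly added SweepJob keywords (queue_settings, resources, component_configuration, secrets_configuration) slot in, a sketch follows. The Objective, GridSamplingAlgorithm, TrialComponent, SweepJobLimits, QueueSettings, and JobResourceConfiguration models are referenced by the docstrings above but not defined in this hunk, so their keyword arguments here are assumptions; every ID and value is a placeholder.

from azure.mgmt.machinelearningservices import models

# Illustrative sweep job highlighting the new queue/resource settings.
sweep_job = models.SweepJob(
    objective=models.Objective(goal="Maximize", primary_metric="accuracy"),
    sampling_algorithm=models.GridSamplingAlgorithm(),
    search_space={"learning_rate": {"type": "uniform", "min_value": 0.001, "max_value": 0.1}},
    trial=models.TrialComponent(
        command="python train.py",                                       # placeholder command
        environment_id="/subscriptions/<sub>/.../environments/my_env/versions/1",
    ),
    limits=models.SweepJobLimits(max_total_trials=20, max_concurrent_trials=4),
    queue_settings=models.QueueSettings(job_tier="Standard"),            # new in this version
    resources=models.JobResourceConfiguration(instance_count=1),         # new in this version
)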
+ :vartype grow_policy: str + :ivar learning_rate: The learning rate for the training procedure. + :vartype learning_rate: float + :ivar max_bin: Specify the Maximum number of discrete bins to bucket continuous features . + :vartype max_bin: int + :ivar max_depth: Specify the max depth to limit the tree depth explicitly. + :vartype max_depth: int + :ivar max_leaves: Specify the max leaves to limit the tree leaves explicitly. + :vartype max_leaves: int + :ivar min_data_in_leaf: The minimum number of data per leaf. + :vartype min_data_in_leaf: int + :ivar min_split_gain: Minimum loss reduction required to make a further partition on a leaf + node of the tree. + :vartype min_split_gain: float + :ivar model_name: The name of the model to train. + :vartype model_name: str + :ivar n_estimators: Specify the number of trees (or rounds) in an model. + :vartype n_estimators: int + :ivar num_leaves: Specify the number of leaves. + :vartype num_leaves: int + :ivar preprocessor_name: The name of the preprocessor to use. + :vartype preprocessor_name: str + :ivar reg_alpha: L1 regularization term on weights. + :vartype reg_alpha: float + :ivar reg_lambda: L2 regularization term on weights. + :vartype reg_lambda: float + :ivar subsample: Subsample ratio of the training instance. + :vartype subsample: float + :ivar subsample_freq: Frequency of subsample. + :vartype subsample_freq: float + :ivar tree_method: Specify the tree method. + :vartype tree_method: str + :ivar with_mean: If true, center before scaling the data with StandardScalar. + :vartype with_mean: bool + :ivar with_std: If true, scaling the data with Unit Variance with StandardScalar. + :vartype with_std: bool + """ + + _attribute_map = { + "booster": {"key": "booster", "type": "str"}, + "boosting_type": {"key": "boostingType", "type": "str"}, + "grow_policy": {"key": "growPolicy", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "max_bin": {"key": "maxBin", "type": "int"}, + "max_depth": {"key": "maxDepth", "type": "int"}, + "max_leaves": {"key": "maxLeaves", "type": "int"}, + "min_data_in_leaf": {"key": "minDataInLeaf", "type": "int"}, + "min_split_gain": {"key": "minSplitGain", "type": "float"}, + "model_name": {"key": "modelName", "type": "str"}, + "n_estimators": {"key": "nEstimators", "type": "int"}, + "num_leaves": {"key": "numLeaves", "type": "int"}, + "preprocessor_name": {"key": "preprocessorName", "type": "str"}, + "reg_alpha": {"key": "regAlpha", "type": "float"}, + "reg_lambda": {"key": "regLambda", "type": "float"}, + "subsample": {"key": "subsample", "type": "float"}, + "subsample_freq": {"key": "subsampleFreq", "type": "float"}, + "tree_method": {"key": "treeMethod", "type": "str"}, + "with_mean": {"key": "withMean", "type": "bool"}, + "with_std": {"key": "withStd", "type": "bool"}, + } + + def __init__( + self, + *, + booster: Optional[str] = None, + boosting_type: Optional[str] = None, + grow_policy: Optional[str] = None, + learning_rate: Optional[float] = None, + max_bin: Optional[int] = None, + max_depth: Optional[int] = None, + max_leaves: Optional[int] = None, + min_data_in_leaf: Optional[int] = None, + min_split_gain: Optional[float] = None, + model_name: Optional[str] = None, + n_estimators: Optional[int] = None, + num_leaves: Optional[int] = None, + preprocessor_name: Optional[str] = None, + reg_alpha: Optional[float] = None, + reg_lambda: Optional[float] = None, + subsample: Optional[float] = None, + subsample_freq: Optional[float] = None, + tree_method: Optional[str] = None, + with_mean: 
bool = False, + with_std: bool = False, + **kwargs: Any + ) -> None: + """ + :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. + :paramtype booster: str + :keyword boosting_type: Specify the boosting type, e.g gbdt for LightGBM. + :paramtype boosting_type: str + :keyword grow_policy: Specify the grow policy, which controls the way new nodes are added to + the tree. + :paramtype grow_policy: str + :keyword learning_rate: The learning rate for the training procedure. + :paramtype learning_rate: float + :keyword max_bin: Specify the Maximum number of discrete bins to bucket continuous features . + :paramtype max_bin: int + :keyword max_depth: Specify the max depth to limit the tree depth explicitly. + :paramtype max_depth: int + :keyword max_leaves: Specify the max leaves to limit the tree leaves explicitly. + :paramtype max_leaves: int + :keyword min_data_in_leaf: The minimum number of data per leaf. + :paramtype min_data_in_leaf: int + :keyword min_split_gain: Minimum loss reduction required to make a further partition on a leaf + node of the tree. + :paramtype min_split_gain: float + :keyword model_name: The name of the model to train. + :paramtype model_name: str + :keyword n_estimators: Specify the number of trees (or rounds) in an model. + :paramtype n_estimators: int + :keyword num_leaves: Specify the number of leaves. + :paramtype num_leaves: int + :keyword preprocessor_name: The name of the preprocessor to use. + :paramtype preprocessor_name: str + :keyword reg_alpha: L1 regularization term on weights. + :paramtype reg_alpha: float + :keyword reg_lambda: L2 regularization term on weights. + :paramtype reg_lambda: float + :keyword subsample: Subsample ratio of the training instance. + :paramtype subsample: float + :keyword subsample_freq: Frequency of subsample. + :paramtype subsample_freq: float + :keyword tree_method: Specify the tree method. + :paramtype tree_method: str + :keyword with_mean: If true, center before scaling the data with StandardScalar. + :paramtype with_mean: bool + :keyword with_std: If true, scaling the data with Unit Variance with StandardScalar. + :paramtype with_std: bool + """ + super().__init__(**kwargs) + self.booster = booster + self.boosting_type = boosting_type + self.grow_policy = grow_policy + self.learning_rate = learning_rate + self.max_bin = max_bin + self.max_depth = max_depth + self.max_leaves = max_leaves + self.min_data_in_leaf = min_data_in_leaf + self.min_split_gain = min_split_gain + self.model_name = model_name + self.n_estimators = n_estimators + self.num_leaves = num_leaves + self.preprocessor_name = preprocessor_name + self.reg_alpha = reg_alpha + self.reg_lambda = reg_lambda + self.subsample = subsample + self.subsample_freq = subsample_freq + self.tree_method = tree_method + self.with_mean = with_mean + self.with_std = with_std + + +class TableParameterSubspace(_serialization.Model): # pylint: disable=too-many-instance-attributes + """TableParameterSubspace. + + :ivar booster: Specify the boosting type, e.g gbdt for XGBoost. + :vartype booster: str + :ivar boosting_type: Specify the boosting type, e.g gbdt for LightGBM. + :vartype boosting_type: str + :ivar grow_policy: Specify the grow policy, which controls the way new nodes are added to the + tree. + :vartype grow_policy: str + :ivar learning_rate: The learning rate for the training procedure. + :vartype learning_rate: str + :ivar max_bin: Specify the Maximum number of discrete bins to bucket continuous features . 
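The fixed-parameter model above simply pins hyperparameters for AutoML table training rather than sweeping over them; a short sketch with arbitrary example values (not recommendations):

from azure.mgmt.machinelearningservices import models

# Illustrative fixed hyperparameters for an AutoML tables run.
fixed_parameters = models.TableFixedParameters(
    model_name="LightGBM",
    boosting_type="gbdt",
    learning_rate=0.05,
    max_depth=6,
    n_estimators=200,
    with_mean=True,
    with_std=True,
)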
+ :vartype max_bin: str + :ivar max_depth: Specify the max depth to limit the tree depth explicitly. + :vartype max_depth: str + :ivar max_leaves: Specify the max leaves to limit the tree leaves explicitly. + :vartype max_leaves: str + :ivar min_data_in_leaf: The minimum number of data per leaf. + :vartype min_data_in_leaf: str + :ivar min_split_gain: Minimum loss reduction required to make a further partition on a leaf + node of the tree. + :vartype min_split_gain: str + :ivar model_name: The name of the model to train. + :vartype model_name: str + :ivar n_estimators: Specify the number of trees (or rounds) in an model. + :vartype n_estimators: str + :ivar num_leaves: Specify the number of leaves. + :vartype num_leaves: str + :ivar preprocessor_name: The name of the preprocessor to use. + :vartype preprocessor_name: str + :ivar reg_alpha: L1 regularization term on weights. + :vartype reg_alpha: str + :ivar reg_lambda: L2 regularization term on weights. + :vartype reg_lambda: str + :ivar subsample: Subsample ratio of the training instance. + :vartype subsample: str + :ivar subsample_freq: Frequency of subsample. + :vartype subsample_freq: str + :ivar tree_method: Specify the tree method. + :vartype tree_method: str + :ivar with_mean: If true, center before scaling the data with StandardScalar. + :vartype with_mean: str + :ivar with_std: If true, scaling the data with Unit Variance with StandardScalar. + :vartype with_std: str + """ + + _attribute_map = { + "booster": {"key": "booster", "type": "str"}, + "boosting_type": {"key": "boostingType", "type": "str"}, + "grow_policy": {"key": "growPolicy", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "max_bin": {"key": "maxBin", "type": "str"}, + "max_depth": {"key": "maxDepth", "type": "str"}, + "max_leaves": {"key": "maxLeaves", "type": "str"}, + "min_data_in_leaf": {"key": "minDataInLeaf", "type": "str"}, + "min_split_gain": {"key": "minSplitGain", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "n_estimators": {"key": "nEstimators", "type": "str"}, + "num_leaves": {"key": "numLeaves", "type": "str"}, + "preprocessor_name": {"key": "preprocessorName", "type": "str"}, + "reg_alpha": {"key": "regAlpha", "type": "str"}, + "reg_lambda": {"key": "regLambda", "type": "str"}, + "subsample": {"key": "subsample", "type": "str"}, + "subsample_freq": {"key": "subsampleFreq", "type": "str"}, + "tree_method": {"key": "treeMethod", "type": "str"}, + "with_mean": {"key": "withMean", "type": "str"}, + "with_std": {"key": "withStd", "type": "str"}, + } + + def __init__( + self, + *, + booster: Optional[str] = None, + boosting_type: Optional[str] = None, + grow_policy: Optional[str] = None, + learning_rate: Optional[str] = None, + max_bin: Optional[str] = None, + max_depth: Optional[str] = None, + max_leaves: Optional[str] = None, + min_data_in_leaf: Optional[str] = None, + min_split_gain: Optional[str] = None, + model_name: Optional[str] = None, + n_estimators: Optional[str] = None, + num_leaves: Optional[str] = None, + preprocessor_name: Optional[str] = None, + reg_alpha: Optional[str] = None, + reg_lambda: Optional[str] = None, + subsample: Optional[str] = None, + subsample_freq: Optional[str] = None, + tree_method: Optional[str] = None, + with_mean: Optional[str] = None, + with_std: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. 
+ :paramtype booster: str + :keyword boosting_type: Specify the boosting type, e.g gbdt for LightGBM. + :paramtype boosting_type: str + :keyword grow_policy: Specify the grow policy, which controls the way new nodes are added to + the tree. + :paramtype grow_policy: str + :keyword learning_rate: The learning rate for the training procedure. + :paramtype learning_rate: str + :keyword max_bin: Specify the Maximum number of discrete bins to bucket continuous features . + :paramtype max_bin: str + :keyword max_depth: Specify the max depth to limit the tree depth explicitly. + :paramtype max_depth: str + :keyword max_leaves: Specify the max leaves to limit the tree leaves explicitly. + :paramtype max_leaves: str + :keyword min_data_in_leaf: The minimum number of data per leaf. + :paramtype min_data_in_leaf: str + :keyword min_split_gain: Minimum loss reduction required to make a further partition on a leaf + node of the tree. + :paramtype min_split_gain: str + :keyword model_name: The name of the model to train. + :paramtype model_name: str + :keyword n_estimators: Specify the number of trees (or rounds) in an model. + :paramtype n_estimators: str + :keyword num_leaves: Specify the number of leaves. + :paramtype num_leaves: str + :keyword preprocessor_name: The name of the preprocessor to use. + :paramtype preprocessor_name: str + :keyword reg_alpha: L1 regularization term on weights. + :paramtype reg_alpha: str + :keyword reg_lambda: L2 regularization term on weights. + :paramtype reg_lambda: str + :keyword subsample: Subsample ratio of the training instance. + :paramtype subsample: str + :keyword subsample_freq: Frequency of subsample. + :paramtype subsample_freq: str + :keyword tree_method: Specify the tree method. + :paramtype tree_method: str + :keyword with_mean: If true, center before scaling the data with StandardScalar. + :paramtype with_mean: str + :keyword with_std: If true, scaling the data with Unit Variance with StandardScalar. + :paramtype with_std: str + """ + super().__init__(**kwargs) + self.booster = booster + self.boosting_type = boosting_type + self.grow_policy = grow_policy + self.learning_rate = learning_rate + self.max_bin = max_bin + self.max_depth = max_depth + self.max_leaves = max_leaves + self.min_data_in_leaf = min_data_in_leaf + self.min_split_gain = min_split_gain + self.model_name = model_name + self.n_estimators = n_estimators + self.num_leaves = num_leaves + self.preprocessor_name = preprocessor_name + self.reg_alpha = reg_alpha + self.reg_lambda = reg_lambda + self.subsample = subsample + self.subsample_freq = subsample_freq + self.tree_method = tree_method + self.with_mean = with_mean + self.with_std = with_std + + +class TableSweepSettings(_serialization.Model): + """TableSweepSettings. + + All required parameters must be populated in order to send to Azure. + + :ivar early_termination: Type of early termination policy for the sweeping job. + :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :ivar sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". 
+ :vartype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + "sampling_algorithm": {"required": True}, + } + + _attribute_map = { + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, + } + + def __init__( + self, + *, + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + **kwargs: Any + ) -> None: + """ + :keyword early_termination: Type of early termination policy for the sweeping job. + :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :keyword sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". + :paramtype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + super().__init__(**kwargs) + self.early_termination = early_termination + self.sampling_algorithm = sampling_algorithm + + class TableVerticalFeaturizationSettings(FeaturizationSettings): """Featurization Configuration. @@ -20275,8 +30773,14 @@ class TableVerticalLimitSettings(_serialization.Model): :vartype max_concurrent_trials: int :ivar max_cores_per_trial: Max cores per iteration. :vartype max_cores_per_trial: int + :ivar max_nodes: Maximum nodes to use for the experiment. + :vartype max_nodes: int :ivar max_trials: Number of iterations. :vartype max_trials: int + :ivar sweep_concurrent_trials: Number of concurrent sweeping runs that user wants to trigger. + :vartype sweep_concurrent_trials: int + :ivar sweep_trials: Number of sweeping runs that user wants to trigger. + :vartype sweep_trials: int :ivar timeout: AutoML job timeout. :vartype timeout: ~datetime.timedelta :ivar trial_timeout: Iteration timeout. @@ -20288,7 +30792,10 @@ class TableVerticalLimitSettings(_serialization.Model): "exit_score": {"key": "exitScore", "type": "float"}, "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, "max_cores_per_trial": {"key": "maxCoresPerTrial", "type": "int"}, + "max_nodes": {"key": "maxNodes", "type": "int"}, "max_trials": {"key": "maxTrials", "type": "int"}, + "sweep_concurrent_trials": {"key": "sweepConcurrentTrials", "type": "int"}, + "sweep_trials": {"key": "sweepTrials", "type": "int"}, "timeout": {"key": "timeout", "type": "duration"}, "trial_timeout": {"key": "trialTimeout", "type": "duration"}, } @@ -20300,7 +30807,10 @@ def __init__( exit_score: Optional[float] = None, max_concurrent_trials: int = 1, max_cores_per_trial: int = -1, + max_nodes: int = 1, max_trials: int = 1000, + sweep_concurrent_trials: int = 0, + sweep_trials: int = 0, timeout: datetime.timedelta = "PT6H", trial_timeout: datetime.timedelta = "PT30M", **kwargs: Any @@ -20315,8 +30825,15 @@ def __init__( :paramtype max_concurrent_trials: int :keyword max_cores_per_trial: Max cores per iteration. :paramtype max_cores_per_trial: int + :keyword max_nodes: Maximum nodes to use for the experiment. + :paramtype max_nodes: int :keyword max_trials: Number of iterations. :paramtype max_trials: int + :keyword sweep_concurrent_trials: Number of concurrent sweeping runs that user wants to + trigger. + :paramtype sweep_concurrent_trials: int + :keyword sweep_trials: Number of sweeping runs that user wants to trigger. + :paramtype sweep_trials: int :keyword timeout: AutoML job timeout. 
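In contrast to TableFixedParameters, the subspace model expresses every hyperparameter as a string-valued search expression; paired with TableSweepSettings it might look like the sketch below. The range expressions are illustrative, and BanditPolicy (an EarlyTerminationPolicy subclass) and its keyword arguments are assumptions not shown in this hunk.

from azure.mgmt.machinelearningservices import models

# Illustrative search space: the same fields that are ints/floats/bools on
# TableFixedParameters are string expressions here.
subspace = models.TableParameterSubspace(
    model_name="choice('LightGBM','XGBoostClassifier')",
    learning_rate="uniform(0.01,0.1)",
    max_depth="choice(4,6,8)",
)

sweep_settings = models.TableSweepSettings(
    sampling_algorithm="Random",         # known values: Grid, Random, Bayesian
    early_termination=models.BanditPolicy(slack_factor=0.1, evaluation_interval=2),
)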
:paramtype timeout: ~datetime.timedelta :keyword trial_timeout: Iteration timeout. @@ -20327,7 +30844,10 @@ def __init__( self.exit_score = exit_score self.max_concurrent_trials = max_concurrent_trials self.max_cores_per_trial = max_cores_per_trial + self.max_nodes = max_nodes self.max_trials = max_trials + self.sweep_concurrent_trials = sweep_concurrent_trials + self.sweep_trials = sweep_trials self.timeout = timeout self.trial_timeout = trial_timeout @@ -20399,7 +30919,7 @@ class TensorFlow(DistributionConfiguration): All required parameters must be populated in order to send to Azure. :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. - Known values are: "PyTorch", "TensorFlow", and "Mpi". + Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray". :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType :ivar parameter_server_count: Number of parameter server tasks. :vartype parameter_server_count: int @@ -20430,7 +30950,7 @@ def __init__(self, *, parameter_server_count: int = 0, worker_count: Optional[in self.worker_count = worker_count -class TextClassification(NlpVertical, AutoMLVertical): +class TextClassification(NlpVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes """Text Classification task in AutoML NLP vertical. NLP - Natural Language Processing. @@ -20452,8 +30972,16 @@ class TextClassification(NlpVertical, AutoMLVertical): :ivar featurization_settings: Featurization inputs needed for AutoML job. :vartype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters :ivar limit_settings: Execution constraints for AutoMLJob. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :ivar validation_data: Validation data inputs. :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar primary_metric: Primary metric for Text-Classification task. 
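The limit settings gain sweep-related knobs in this API version; a sketch showing them next to the existing limits, with illustrative values:

import datetime

from azure.mgmt.machinelearningservices import models

limits = models.TableVerticalLimitSettings(
    max_trials=50,
    max_concurrent_trials=4,
    max_nodes=4,                         # new in this version
    sweep_trials=20,                     # new in this version
    sweep_concurrent_trials=4,           # new in this version
    timeout=datetime.timedelta(hours=2),
    trial_timeout=datetime.timedelta(minutes=20),
)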
Known values are: @@ -20474,7 +31002,10 @@ class TextClassification(NlpVertical, AutoMLVertical): "task_type": {"key": "taskType", "type": "str"}, "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, "primary_metric": {"key": "primaryMetric", "type": "str"}, } @@ -20486,7 +31017,10 @@ def __init__( log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, validation_data: Optional["_models.MLTableJobInput"] = None, primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, **kwargs: Any @@ -20503,8 +31037,16 @@ def __init__( :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters :keyword limit_settings: Execution constraints for AutoMLJob. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :keyword validation_data: Validation data inputs. :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword primary_metric: Primary metric for Text-Classification task. Known values are: @@ -20515,7 +31057,10 @@ def __init__( """ super().__init__( featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, limit_settings=limit_settings, + search_space=search_space, + sweep_settings=sweep_settings, validation_data=validation_data, log_verbosity=log_verbosity, target_column_name=target_column_name, @@ -20528,11 +31073,14 @@ def __init__( self.training_data = training_data self.primary_metric = primary_metric self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings + self.search_space = search_space + self.sweep_settings = sweep_settings self.validation_data = validation_data -class TextClassificationMultilabel(NlpVertical, AutoMLVertical): +class TextClassificationMultilabel(NlpVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes """Text Classification Multilabel task in AutoML NLP vertical. 
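The NLP verticals pick up the same sweep-style additions. Below is an illustrative TextClassification configuration using them; the data URIs and parameter values are placeholders, and the keyword arguments passed to NlpFixedParameters, NlpParameterSubspace, NlpSweepSettings, and NlpVerticalLimitSettings are assumptions based on their docstring references rather than definitions shown in this hunk.

from azure.mgmt.machinelearningservices import models

text_task = models.TextClassification(
    training_data=models.MLTableJobInput(uri="azureml://datastores/<ds>/paths/train-mltable"),
    validation_data=models.MLTableJobInput(uri="azureml://datastores/<ds>/paths/valid-mltable"),
    target_column_name="label",
    primary_metric="Accuracy",
    fixed_parameters=models.NlpFixedParameters(number_of_epochs=3),                   # new
    search_space=[models.NlpParameterSubspace(learning_rate="uniform(2e-5,5e-5)")],   # new
    sweep_settings=models.NlpSweepSettings(sampling_algorithm="Random"),              # new
    limit_settings=models.NlpVerticalLimitSettings(max_trials=4, max_concurrent_trials=2),
)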
NLP - Natural Language Processing. @@ -20556,8 +31104,16 @@ class TextClassificationMultilabel(NlpVertical, AutoMLVertical): :ivar featurization_settings: Featurization inputs needed for AutoML job. :vartype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters :ivar limit_settings: Execution constraints for AutoMLJob. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :ivar validation_data: Validation data inputs. :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar primary_metric: Primary metric for Text-Classification-Multilabel task. @@ -20580,7 +31136,10 @@ class TextClassificationMultilabel(NlpVertical, AutoMLVertical): "task_type": {"key": "taskType", "type": "str"}, "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, "primary_metric": {"key": "primaryMetric", "type": "str"}, } @@ -20592,7 +31151,10 @@ def __init__( log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, validation_data: Optional["_models.MLTableJobInput"] = None, **kwargs: Any ) -> None: @@ -20608,14 +31170,25 @@ def __init__( :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters :keyword limit_settings: Execution constraints for AutoMLJob. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. 
+ :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :keyword validation_data: Validation data inputs. :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ super().__init__( featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, limit_settings=limit_settings, + search_space=search_space, + sweep_settings=sweep_settings, validation_data=validation_data, log_verbosity=log_verbosity, target_column_name=target_column_name, @@ -20628,11 +31201,14 @@ def __init__( self.training_data = training_data self.primary_metric = None self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings + self.search_space = search_space + self.sweep_settings = sweep_settings self.validation_data = validation_data -class TextNer(NlpVertical, AutoMLVertical): +class TextNer(NlpVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes """Text-NER task in AutoML NLP vertical. NER - Named Entity Recognition. NLP - Natural Language Processing. @@ -20657,8 +31233,16 @@ class TextNer(NlpVertical, AutoMLVertical): :ivar featurization_settings: Featurization inputs needed for AutoML job. :vartype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar fixed_parameters: Model/training parameters that will remain constant throughout + training. + :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters :ivar limit_settings: Execution constraints for AutoMLJob. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :ivar validation_data: Validation data inputs. :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar primary_metric: Primary metric for Text-NER task. 
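# --- Illustrative usage (not part of the generated diff) --------------------
# A minimal sketch of the new NLP sweep surface added above (fixed_parameters,
# search_space and sweep_settings on TextClassification, TextClassificationMultilabel
# and TextNer). The URIs, column name and metric are placeholders, and the
# assumptions that NlpFixedParameters/NlpParameterSubspace accept no arguments
# and that NlpSweepSettings takes a required sampling_algorithm come from their
# model definitions elsewhere in this package, not from this diff.
from azure.mgmt.machinelearningservices import models as _models

text_task = _models.TextClassification(
    training_data=_models.MLTableJobInput(
        uri="azureml://datastores/workspaceblobstore/paths/train"  # placeholder URI
    ),
    validation_data=_models.MLTableJobInput(
        uri="azureml://datastores/workspaceblobstore/paths/valid"  # placeholder URI
    ),
    target_column_name="label",
    # Parameters held constant across every trial.
    fixed_parameters=_models.NlpFixedParameters(),
    # One or more subspaces from which hyperparameter combinations are sampled.
    search_space=[_models.NlpParameterSubspace()],
    # How the subspaces are swept; "Random" is one of the SamplingAlgorithmType values.
    sweep_settings=_models.NlpSweepSettings(sampling_algorithm="Random"),
    primary_metric="Accuracy",
)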
@@ -20681,7 +31265,10 @@ class TextNer(NlpVertical, AutoMLVertical): "task_type": {"key": "taskType", "type": "str"}, "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, "primary_metric": {"key": "primaryMetric", "type": "str"}, } @@ -20693,7 +31280,10 @@ def __init__( log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, validation_data: Optional["_models.MLTableJobInput"] = None, **kwargs: Any ) -> None: @@ -20709,14 +31299,25 @@ def __init__( :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword fixed_parameters: Model/training parameters that will remain constant throughout + training. + :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters :keyword limit_settings: Execution constraints for AutoMLJob. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] + :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :keyword validation_data: Validation data inputs. :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ super().__init__( featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, limit_settings=limit_settings, + search_space=search_space, + sweep_settings=sweep_settings, validation_data=validation_data, log_verbosity=log_verbosity, target_column_name=target_column_name, @@ -20729,12 +31330,15 @@ def __init__( self.training_data = training_data self.primary_metric = None self.featurization_settings = featurization_settings + self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings + self.search_space = search_space + self.sweep_settings = sweep_settings self.validation_data = validation_data class TmpfsOptions(_serialization.Model): - """Describes the tmpfs options for the container. + """TmpfsOptions. :ivar size: Mention the Tmpfs size. :vartype size: int @@ -20753,6 +31357,126 @@ def __init__(self, *, size: Optional[int] = None, **kwargs: Any) -> None: self.size = size +class TopNFeaturesByAttribution(MonitoringFeatureFilterBase): + """TopNFeaturesByAttribution. + + All required parameters must be populated in order to send to Azure. 
+ + :ivar filter_type: [Required] Specifies the feature filter to leverage when selecting features + to calculate metrics over. Required. Known values are: "AllFeatures", "TopNByAttribution", and + "FeatureSubset". + :vartype filter_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType + :ivar top: The number of top features to include. + :vartype top: int + """ + + _validation = { + "filter_type": {"required": True}, + } + + _attribute_map = { + "filter_type": {"key": "filterType", "type": "str"}, + "top": {"key": "top", "type": "int"}, + } + + def __init__(self, *, top: int = 10, **kwargs: Any) -> None: + """ + :keyword top: The number of top features to include. + :paramtype top: int + """ + super().__init__(**kwargs) + self.filter_type: str = "TopNByAttribution" + self.top = top + + +class TrailingInputData(MonitoringInputDataBase): + """Trailing input data definition. + + All required parameters must be populated in order to send to Azure. + + :ivar columns: Mapping of column names to special uses. + :vartype columns: dict[str, str] + :ivar data_context: The context metadata of the data source. + :vartype data_context: str + :ivar input_data_type: [Required] Specifies the type of signal to monitor. Required. Known + values are: "Static", "Trailing", and "Fixed". + :vartype input_data_type: str or + ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str + :ivar preprocessing_component_id: The ARM resource ID of the component resource used to + preprocess the data. + :vartype preprocessing_component_id: str + :ivar window_offset: [Required] The time offset between the end of the data window and the + monitor's current run time. Required. + :vartype window_offset: ~datetime.timedelta + :ivar window_size: [Required] The size of the trailing data window. Required. + :vartype window_size: ~datetime.timedelta + """ + + _validation = { + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "window_offset": {"required": True}, + "window_size": {"required": True}, + } + + _attribute_map = { + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "preprocessing_component_id": {"key": "preprocessingComponentId", "type": "str"}, + "window_offset": {"key": "windowOffset", "type": "duration"}, + "window_size": {"key": "windowSize", "type": "duration"}, + } + + def __init__( + self, + *, + job_input_type: Union[str, "_models.JobInputType"], + uri: str, + window_offset: datetime.timedelta, + window_size: datetime.timedelta, + columns: Optional[Dict[str, str]] = None, + data_context: Optional[str] = None, + preprocessing_component_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword columns: Mapping of column names to special uses. + :paramtype columns: dict[str, str] + :keyword data_context: The context metadata of the data source. 
+ :paramtype data_context: str + :keyword job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". + :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str + :keyword preprocessing_component_id: The ARM resource ID of the component resource used to + preprocess the data. + :paramtype preprocessing_component_id: str + :keyword window_offset: [Required] The time offset between the end of the data window and the + monitor's current run time. Required. + :paramtype window_offset: ~datetime.timedelta + :keyword window_size: [Required] The size of the trailing data window. Required. + :paramtype window_size: ~datetime.timedelta + """ + super().__init__(columns=columns, data_context=data_context, job_input_type=job_input_type, uri=uri, **kwargs) + self.input_data_type: str = "Trailing" + self.preprocessing_component_id = preprocessing_component_id + self.window_offset = window_offset + self.window_size = window_size + + class TrialComponent(_serialization.Model): """Trial component definition. @@ -20826,6 +31550,41 @@ def __init__( self.resources = resources +class TritonInferencingServer(InferencingServer): + """Triton inferencing server configurations. + + All required parameters must be populated in order to send to Azure. + + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". + :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType + :ivar inference_configuration: Inference configuration for Triton. + :vartype inference_configuration: + ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration + """ + + _validation = { + "server_type": {"required": True}, + } + + _attribute_map = { + "server_type": {"key": "serverType", "type": "str"}, + "inference_configuration": {"key": "inferenceConfiguration", "type": "OnlineInferenceConfiguration"}, + } + + def __init__( + self, *, inference_configuration: Optional["_models.OnlineInferenceConfiguration"] = None, **kwargs: Any + ) -> None: + """ + :keyword inference_configuration: Inference configuration for Triton. + :paramtype inference_configuration: + ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration + """ + super().__init__(**kwargs) + self.server_type: str = "Triton" + self.inference_configuration = inference_configuration + + class TritonModelJobInput(AssetJobInput, JobInput): """TritonModelJobInput. @@ -20890,7 +31649,14 @@ class TritonModelJobOutput(AssetJobOutput, JobOutput): :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :ivar asset_name: Output Asset Name. + :vartype asset_name: str + :ivar asset_version: Output Asset Version. + :vartype asset_version: str + :ivar auto_delete_setting: Auto delete setting of output data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar mode: Output Asset Delivery Mode. 
Known values are: "ReadWriteMount", "Upload", and + "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. :vartype uri: str @@ -20903,6 +31669,9 @@ class TritonModelJobOutput(AssetJobOutput, JobOutput): _attribute_map = { "description": {"key": "description", "type": "str"}, "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "mode": {"key": "mode", "type": "str"}, "uri": {"key": "uri", "type": "str"}, } @@ -20911,6 +31680,9 @@ def __init__( self, *, description: Optional[str] = None, + asset_name: Optional[str] = None, + asset_version: Optional[str] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, **kwargs: Any @@ -20918,14 +31690,32 @@ def __init__( """ :keyword description: Description for the output. :paramtype description: str - :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :keyword asset_name: Output Asset Name. + :paramtype asset_name: str + :keyword asset_version: Output Asset Version. + :paramtype asset_version: str + :keyword auto_delete_setting: Auto delete setting of output data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str """ - super().__init__(mode=mode, uri=uri, description=description, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) self.description = description self.job_output_type: str = "triton_model" + self.asset_name = asset_name + self.asset_version = asset_version + self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri @@ -21067,9 +31857,13 @@ class UriFileDataVersion(DataVersionBaseProperties): :vartype properties: dict[str, str] :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar is_anonymous: If the name version are system generated (anonymous registration). + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived?. + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :vartype is_archived: bool :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", "uri_folder", and "mltable". @@ -21077,6 +31871,11 @@ class UriFileDataVersion(DataVersionBaseProperties): :ivar data_uri: [Required] Uri of the data. Example: https://go.microsoft.com/fwlink/?linkid=2202330. Required. 
:vartype data_uri: str + :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :ivar stage: Stage in the data lifecycle assigned to this data asset. + :vartype stage: str """ _validation = { @@ -21088,10 +31887,13 @@ class UriFileDataVersion(DataVersionBaseProperties): "description": {"key": "description", "type": "str"}, "properties": {"key": "properties", "type": "{str}"}, "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "is_anonymous": {"key": "isAnonymous", "type": "bool"}, "is_archived": {"key": "isArchived", "type": "bool"}, "data_type": {"key": "dataType", "type": "str"}, "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -21101,8 +31903,11 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, is_anonymous: bool = False, is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + stage: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -21112,21 +31917,34 @@ def __init__( :paramtype properties: dict[str, str] :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword is_anonymous: If the name version are system generated (anonymous registration). + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived?. + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :paramtype is_archived: bool :keyword data_uri: [Required] Uri of the data. Example: https://go.microsoft.com/fwlink/?linkid=2202330. Required. :paramtype data_uri: str + :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword stage: Stage in the data lifecycle assigned to this data asset. + :paramtype stage: str """ super().__init__( description=description, properties=properties, tags=tags, + auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, data_uri=data_uri, + intellectual_property=intellectual_property, + stage=stage, **kwargs ) self.data_type: str = "uri_file" @@ -21196,7 +32014,14 @@ class UriFileJobOutput(AssetJobOutput, JobOutput): :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :ivar asset_name: Output Asset Name. 
+ :vartype asset_name: str + :ivar asset_version: Output Asset Version. + :vartype asset_version: str + :ivar auto_delete_setting: Auto delete setting of output data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. :vartype uri: str @@ -21209,6 +32034,9 @@ class UriFileJobOutput(AssetJobOutput, JobOutput): _attribute_map = { "description": {"key": "description", "type": "str"}, "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "mode": {"key": "mode", "type": "str"}, "uri": {"key": "uri", "type": "str"}, } @@ -21217,6 +32045,9 @@ def __init__( self, *, description: Optional[str] = None, + asset_name: Optional[str] = None, + asset_version: Optional[str] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, **kwargs: Any @@ -21224,14 +32055,32 @@ def __init__( """ :keyword description: Description for the output. :paramtype description: str - :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :keyword asset_name: Output Asset Name. + :paramtype asset_name: str + :keyword asset_version: Output Asset Version. + :paramtype asset_version: str + :keyword auto_delete_setting: Auto delete setting of output data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str """ - super().__init__(mode=mode, uri=uri, description=description, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) self.description = description self.job_output_type: str = "uri_file" + self.asset_name = asset_name + self.asset_version = asset_version + self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri @@ -21247,9 +32096,13 @@ class UriFolderDataVersion(DataVersionBaseProperties): :vartype properties: dict[str, str] :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar is_anonymous: If the name version are system generated (anonymous registration). + :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived?. + :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :vartype is_archived: bool :ivar data_type: [Required] Specifies the type of data. 
Required. Known values are: "uri_file", "uri_folder", and "mltable". @@ -21257,6 +32110,11 @@ class UriFolderDataVersion(DataVersionBaseProperties): :ivar data_uri: [Required] Uri of the data. Example: https://go.microsoft.com/fwlink/?linkid=2202330. Required. :vartype data_uri: str + :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. + :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :ivar stage: Stage in the data lifecycle assigned to this data asset. + :vartype stage: str """ _validation = { @@ -21268,10 +32126,13 @@ class UriFolderDataVersion(DataVersionBaseProperties): "description": {"key": "description", "type": "str"}, "properties": {"key": "properties", "type": "{str}"}, "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "is_anonymous": {"key": "isAnonymous", "type": "bool"}, "is_archived": {"key": "isArchived", "type": "bool"}, "data_type": {"key": "dataType", "type": "str"}, "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -21281,8 +32142,11 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, is_anonymous: bool = False, is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + stage: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -21292,21 +32156,34 @@ def __init__( :paramtype properties: dict[str, str] :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword is_anonymous: If the name version are system generated (anonymous registration). + :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword is_anonymous: If the name version are system generated (anonymous registration). For + types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived?. + :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is + provided it will be used to populate IsArchived. :paramtype is_archived: bool :keyword data_uri: [Required] Uri of the data. Example: https://go.microsoft.com/fwlink/?linkid=2202330. Required. :paramtype data_uri: str + :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual + Property. + :paramtype intellectual_property: + ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword stage: Stage in the data lifecycle assigned to this data asset. + :paramtype stage: str """ super().__init__( description=description, properties=properties, tags=tags, + auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, data_uri=data_uri, + intellectual_property=intellectual_property, + stage=stage, **kwargs ) self.data_type: str = "uri_folder" @@ -21376,7 +32253,14 @@ class UriFolderJobOutput(AssetJobOutput, JobOutput): :ivar job_output_type: [Required] Specifies the type of job. Required. 
Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :ivar asset_name: Output Asset Name. + :vartype asset_name: str + :ivar asset_version: Output Asset Version. + :vartype asset_version: str + :ivar auto_delete_setting: Auto delete setting of output data asset. + :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. :vartype uri: str @@ -21389,6 +32273,9 @@ class UriFolderJobOutput(AssetJobOutput, JobOutput): _attribute_map = { "description": {"key": "description", "type": "str"}, "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, "mode": {"key": "mode", "type": "str"}, "uri": {"key": "uri", "type": "str"}, } @@ -21397,6 +32284,9 @@ def __init__( self, *, description: Optional[str] = None, + asset_name: Optional[str] = None, + asset_version: Optional[str] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, **kwargs: Any @@ -21404,14 +32294,32 @@ def __init__( """ :keyword description: Description for the output. :paramtype description: str - :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount" and "Upload". + :keyword asset_name: Output Asset Name. + :paramtype asset_name: str + :keyword asset_version: Output Asset Version. + :paramtype asset_version: str + :keyword auto_delete_setting: Auto delete setting of output data asset. + :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and + "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str """ - super().__init__(mode=mode, uri=uri, description=description, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) self.description = description self.job_output_type: str = "uri_folder" + self.asset_name = asset_name + self.asset_version = asset_version + self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri @@ -21643,17 +32551,20 @@ class UsernamePasswordAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionP All required parameters must be populated in order to send to Azure. :ivar auth_type: Authentication type of the connection target. Required. Known values are: - "PAT", "ManagedIdentity", "UsernamePassword", "None", and "SAS". + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType :ivar category: Category of the connection. 
Known values are: "PythonFeed", - "ContainerRegistry", and "Git". + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar expiry_time: + :vartype expiry_time: ~datetime.datetime + :ivar metadata: Any object. + :vartype metadata: JSON :ivar target: :vartype target: str - :ivar value: Value details of the workspace connection. - :vartype value: str - :ivar value_format: format for the workspace connection value. "JSON" - :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat :ivar credentials: :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword @@ -21666,9 +32577,9 @@ class UsernamePasswordAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionP _attribute_map = { "auth_type": {"key": "authType", "type": "str"}, "category": {"key": "category", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "metadata": {"key": "metadata", "type": "object"}, "target": {"key": "target", "type": "str"}, - "value": {"key": "value", "type": "str"}, - "value_format": {"key": "valueFormat", "type": "str"}, "credentials": {"key": "credentials", "type": "WorkspaceConnectionUsernamePassword"}, } @@ -21676,27 +32587,29 @@ def __init__( self, *, category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + expiry_time: Optional[datetime.datetime] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - value: Optional[str] = None, - value_format: Optional[Union[str, "_models.ValueFormat"]] = None, credentials: Optional["_models.WorkspaceConnectionUsernamePassword"] = None, **kwargs: Any ) -> None: """ :keyword category: Category of the connection. Known values are: "PythonFeed", - "ContainerRegistry", and "Git". + "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", + "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword expiry_time: + :paramtype expiry_time: ~datetime.datetime + :keyword metadata: Any object. + :paramtype metadata: JSON :keyword target: :paramtype target: str - :keyword value: Value details of the workspace connection. - :paramtype value: str - :keyword value_format: format for the workspace connection value. "JSON" - :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat :keyword credentials: :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword """ - super().__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + super().__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) self.auth_type: str = "UsernamePassword" self.credentials = credentials @@ -22130,7 +33043,7 @@ def __init__( class VolumeDefinition(_serialization.Model): - """Describes the volume configuration for the container. + """VolumeDefinition. :ivar type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Known values are: "bind", "volume", "tmpfs", and "npipe". 
@@ -22207,7 +33120,7 @@ def __init__( class VolumeOptions(_serialization.Model): - """Describes the volume options for the container. + """VolumeOptions. :ivar nocopy: Indicate whether volume is nocopy. :vartype nocopy: bool @@ -22242,83 +33155,112 @@ class Workspace(Resource): # pylint: disable=too-many-instance-attributes :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar identity: The identity of the resource. + :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar location: Specifies the location of the resource. + :ivar kind: + :vartype kind: str + :ivar location: :vartype location: str - :ivar tags: Contains resource tags defined as key/value pairs. - :vartype tags: dict[str, str] - :ivar sku: The sku of the workspace. + :ivar sku: Optional. This field is required to be implemented by the RP because AML is + supporting more than one tier. :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar workspace_id: The immutable id associated with this workspace. - :vartype workspace_id: str - :ivar description: The description of this workspace. - :vartype description: str - :ivar friendly_name: The friendly name for this workspace. This name in mutable. - :vartype friendly_name: str - :ivar key_vault: ARM id of the key vault associated with this workspace. This cannot be changed - once the workspace has been created. - :vartype key_vault: str + :ivar tags: Dictionary of :code:``. + :vartype tags: dict[str, str] + :ivar allow_public_access_when_behind_vnet: The flag to indicate whether to allow public access + when behind VNet. + :vartype allow_public_access_when_behind_vnet: bool :ivar application_insights: ARM id of the application insights associated with this workspace. :vartype application_insights: str + :ivar associated_workspaces: + :vartype associated_workspaces: list[str] + :ivar container_registries: + :vartype container_registries: list[str] :ivar container_registry: ARM id of the container registry associated with this workspace. :vartype container_registry: str - :ivar storage_account: ARM id of the storage account associated with this workspace. This - cannot be changed once the workspace has been created. - :vartype storage_account: str + :ivar description: The description of this workspace. + :vartype description: str :ivar discovery_url: Url for the discovery service to identify regional endpoints for machine learning experimentation services. :vartype discovery_url: str - :ivar provisioning_state: The current deployment state of workspace resource. The - provisioningState is to indicate states for resource provisioning. Known values are: "Unknown", - "Updating", "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar encryption: The encryption settings of Azure ML workspace. + :ivar enable_data_isolation: + :vartype enable_data_isolation: bool + :ivar encryption: :vartype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty + :ivar existing_workspaces: + :vartype existing_workspaces: list[str] + :ivar feature_store_settings: Settings for feature store type workspace. 
+ :vartype feature_store_settings: + ~azure.mgmt.machinelearningservices.models.FeatureStoreSettings + :ivar friendly_name: The friendly name for this workspace. This name in mutable. + :vartype friendly_name: str :ivar hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service. :vartype hbi_workspace: bool - :ivar service_provisioned_resource_group: The name of the managed resource group created by - workspace RP in customer subscription if the workspace is CMK workspace. - :vartype service_provisioned_resource_group: str - :ivar private_link_count: Count of private connections in the workspace. - :vartype private_link_count: int + :ivar hub_resource_id: + :vartype hub_resource_id: str :ivar image_build_compute: The compute name for image build. :vartype image_build_compute: str - :ivar allow_public_access_when_behind_vnet: The flag to indicate whether to allow public access - when behind VNet. - :vartype allow_public_access_when_behind_vnet: bool - :ivar public_network_access: Whether requests from Public Network are allowed. Known values - are: "Enabled" and "Disabled". - :vartype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess + :ivar key_vault: ARM id of the key vault associated with this workspace. This cannot be changed + once the workspace has been created. + :vartype key_vault: str + :ivar key_vaults: + :vartype key_vaults: list[str] + :ivar managed_network: Managed Network settings for a machine learning workspace. + :vartype managed_network: ~azure.mgmt.machinelearningservices.models.ManagedNetworkSettings + :ivar ml_flow_tracking_uri: The URI associated with this workspace that machine learning flow + must point at to set up tracking. + :vartype ml_flow_tracking_uri: str + :ivar notebook_info: The notebook info of Azure ML workspace. + :vartype notebook_info: ~azure.mgmt.machinelearningservices.models.NotebookResourceInfo + :ivar primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :vartype primary_user_assigned_identity: str :ivar private_endpoint_connections: The list of private endpoint connections in the workspace. :vartype private_endpoint_connections: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + :ivar private_link_count: Count of private connections in the workspace. + :vartype private_link_count: int + :ivar provisioning_state: The current deployment state of workspace resource. The + provisioningState is to indicate states for resource provisioning. Known values are: "Unknown", + "Updating", "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :ivar public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled" and "Disabled". + :vartype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :ivar service_managed_resources_settings: The service managed resource settings. + :vartype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :ivar service_provisioned_resource_group: The name of the managed resource group created by + workspace RP in customer subscription if the workspace is CMK workspace. 
+ :vartype service_provisioned_resource_group: str :ivar shared_private_link_resources: The list of shared private link resources in this workspace. :vartype shared_private_link_resources: list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] - :ivar notebook_info: The notebook info of Azure ML workspace. - :vartype notebook_info: ~azure.mgmt.machinelearningservices.models.NotebookResourceInfo - :ivar service_managed_resources_settings: The service managed resource settings. - :vartype service_managed_resources_settings: - ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings - :ivar primary_user_assigned_identity: The user assigned identity resource id that represents - the workspace identity. - :vartype primary_user_assigned_identity: str - :ivar tenant_id: The tenant id associated with this workspace. - :vartype tenant_id: str + :ivar soft_delete_retention_in_days: Retention time in days after workspace get soft deleted. + :vartype soft_delete_retention_in_days: int + :ivar storage_account: ARM id of the storage account associated with this workspace. This + cannot be changed once the workspace has been created. + :vartype storage_account: str + :ivar storage_accounts: + :vartype storage_accounts: list[str] :ivar storage_hns_enabled: If the storage associated with the workspace has hierarchical namespace(HNS) enabled. :vartype storage_hns_enabled: bool - :ivar ml_flow_tracking_uri: The URI associated with this workspace that machine learning flow - must point at to set up tracking. - :vartype ml_flow_tracking_uri: str + :ivar system_datastores_auth_mode: The auth mode used for accessing the system datastores of + the workspace. + :vartype system_datastores_auth_mode: str + :ivar tenant_id: The tenant id associated with this workspace. + :vartype tenant_id: str :ivar v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided by the v2 API. :vartype v1_legacy_mode: bool + :ivar workspace_hub_config: WorkspaceHub's configuration object. + :vartype workspace_hub_config: ~azure.mgmt.machinelearningservices.models.WorkspaceHubConfig + :ivar workspace_id: The immutable id associated with this workspace. 
+ :vartype workspace_id: str """ _validation = { @@ -22326,15 +33268,15 @@ class Workspace(Resource): # pylint: disable=too-many-instance-attributes "name": {"readonly": True}, "type": {"readonly": True}, "system_data": {"readonly": True}, - "workspace_id": {"readonly": True}, + "ml_flow_tracking_uri": {"readonly": True}, + "notebook_info": {"readonly": True}, + "private_endpoint_connections": {"readonly": True}, + "private_link_count": {"readonly": True}, "provisioning_state": {"readonly": True}, "service_provisioned_resource_group": {"readonly": True}, - "private_link_count": {"readonly": True}, - "private_endpoint_connections": {"readonly": True}, - "notebook_info": {"readonly": True}, - "tenant_id": {"readonly": True}, "storage_hns_enabled": {"readonly": True}, - "ml_flow_tracking_uri": {"readonly": True}, + "tenant_id": {"readonly": True}, + "workspace_id": {"readonly": True}, } _attribute_map = { @@ -22343,181 +33285,298 @@ class Workspace(Resource): # pylint: disable=too-many-instance-attributes "type": {"key": "type", "type": "str"}, "system_data": {"key": "systemData", "type": "SystemData"}, "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, "location": {"key": "location", "type": "str"}, - "tags": {"key": "tags", "type": "{str}"}, "sku": {"key": "sku", "type": "Sku"}, - "workspace_id": {"key": "properties.workspaceId", "type": "str"}, - "description": {"key": "properties.description", "type": "str"}, - "friendly_name": {"key": "properties.friendlyName", "type": "str"}, - "key_vault": {"key": "properties.keyVault", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "allow_public_access_when_behind_vnet": {"key": "properties.allowPublicAccessWhenBehindVnet", "type": "bool"}, "application_insights": {"key": "properties.applicationInsights", "type": "str"}, + "associated_workspaces": {"key": "properties.associatedWorkspaces", "type": "[str]"}, + "container_registries": {"key": "properties.containerRegistries", "type": "[str]"}, "container_registry": {"key": "properties.containerRegistry", "type": "str"}, - "storage_account": {"key": "properties.storageAccount", "type": "str"}, + "description": {"key": "properties.description", "type": "str"}, "discovery_url": {"key": "properties.discoveryUrl", "type": "str"}, - "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "enable_data_isolation": {"key": "properties.enableDataIsolation", "type": "bool"}, "encryption": {"key": "properties.encryption", "type": "EncryptionProperty"}, + "existing_workspaces": {"key": "properties.existingWorkspaces", "type": "[str]"}, + "feature_store_settings": {"key": "properties.featureStoreSettings", "type": "FeatureStoreSettings"}, + "friendly_name": {"key": "properties.friendlyName", "type": "str"}, "hbi_workspace": {"key": "properties.hbiWorkspace", "type": "bool"}, - "service_provisioned_resource_group": {"key": "properties.serviceProvisionedResourceGroup", "type": "str"}, - "private_link_count": {"key": "properties.privateLinkCount", "type": "int"}, + "hub_resource_id": {"key": "properties.hubResourceId", "type": "str"}, "image_build_compute": {"key": "properties.imageBuildCompute", "type": "str"}, - "allow_public_access_when_behind_vnet": {"key": "properties.allowPublicAccessWhenBehindVnet", "type": "bool"}, - "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "key_vault": {"key": "properties.keyVault", "type": "str"}, + "key_vaults": {"key": "properties.keyVaults", "type": 
"[str]"}, + "managed_network": {"key": "properties.managedNetwork", "type": "ManagedNetworkSettings"}, + "ml_flow_tracking_uri": {"key": "properties.mlFlowTrackingUri", "type": "str"}, + "notebook_info": {"key": "properties.notebookInfo", "type": "NotebookResourceInfo"}, + "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, "private_endpoint_connections": { "key": "properties.privateEndpointConnections", "type": "[PrivateEndpointConnection]", }, - "shared_private_link_resources": { - "key": "properties.sharedPrivateLinkResources", - "type": "[SharedPrivateLinkResource]", - }, - "notebook_info": {"key": "properties.notebookInfo", "type": "NotebookResourceInfo"}, + "private_link_count": {"key": "properties.privateLinkCount", "type": "int"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, "service_managed_resources_settings": { "key": "properties.serviceManagedResourcesSettings", "type": "ServiceManagedResourcesSettings", }, - "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, - "tenant_id": {"key": "properties.tenantId", "type": "str"}, + "service_provisioned_resource_group": {"key": "properties.serviceProvisionedResourceGroup", "type": "str"}, + "shared_private_link_resources": { + "key": "properties.sharedPrivateLinkResources", + "type": "[SharedPrivateLinkResource]", + }, + "soft_delete_retention_in_days": {"key": "properties.softDeleteRetentionInDays", "type": "int"}, + "storage_account": {"key": "properties.storageAccount", "type": "str"}, + "storage_accounts": {"key": "properties.storageAccounts", "type": "[str]"}, "storage_hns_enabled": {"key": "properties.storageHnsEnabled", "type": "bool"}, - "ml_flow_tracking_uri": {"key": "properties.mlFlowTrackingUri", "type": "str"}, + "system_datastores_auth_mode": {"key": "properties.systemDatastoresAuthMode", "type": "str"}, + "tenant_id": {"key": "properties.tenantId", "type": "str"}, "v1_legacy_mode": {"key": "properties.v1LegacyMode", "type": "bool"}, + "workspace_hub_config": {"key": "properties.workspaceHubConfig", "type": "WorkspaceHubConfig"}, + "workspace_id": {"key": "properties.workspaceId", "type": "str"}, } def __init__( # pylint: disable=too-many-locals self, *, identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, sku: Optional["_models.Sku"] = None, - description: Optional[str] = None, - friendly_name: Optional[str] = None, - key_vault: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + allow_public_access_when_behind_vnet: Optional[bool] = None, application_insights: Optional[str] = None, + associated_workspaces: Optional[List[str]] = None, + container_registries: Optional[List[str]] = None, container_registry: Optional[str] = None, - storage_account: Optional[str] = None, + description: Optional[str] = None, discovery_url: Optional[str] = None, + enable_data_isolation: Optional[bool] = None, encryption: Optional["_models.EncryptionProperty"] = None, - hbi_workspace: bool = False, + existing_workspaces: Optional[List[str]] = None, + feature_store_settings: Optional["_models.FeatureStoreSettings"] = None, + friendly_name: Optional[str] = None, + hbi_workspace: Optional[bool] = None, + hub_resource_id: Optional[str] = None, image_build_compute: Optional[str] = None, - 
allow_public_access_when_behind_vnet: bool = False, - public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, - shared_private_link_resources: Optional[List["_models.SharedPrivateLinkResource"]] = None, - service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, + key_vault: Optional[str] = None, + key_vaults: Optional[List[str]] = None, + managed_network: Optional["_models.ManagedNetworkSettings"] = None, primary_user_assigned_identity: Optional[str] = None, - v1_legacy_mode: bool = False, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccessType"]] = None, + service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, + shared_private_link_resources: Optional[List["_models.SharedPrivateLinkResource"]] = None, + soft_delete_retention_in_days: Optional[int] = None, + storage_account: Optional[str] = None, + storage_accounts: Optional[List[str]] = None, + system_datastores_auth_mode: Optional[str] = None, + v1_legacy_mode: Optional[bool] = None, + workspace_hub_config: Optional["_models.WorkspaceHubConfig"] = None, **kwargs: Any ) -> None: """ - :keyword identity: The identity of the resource. + :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword location: Specifies the location of the resource. + :keyword kind: + :paramtype kind: str + :keyword location: :paramtype location: str - :keyword tags: Contains resource tags defined as key/value pairs. - :paramtype tags: dict[str, str] - :keyword sku: The sku of the workspace. + :keyword sku: Optional. This field is required to be implemented by the RP because AML is + supporting more than one tier. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword description: The description of this workspace. - :paramtype description: str - :keyword friendly_name: The friendly name for this workspace. This name in mutable. - :paramtype friendly_name: str - :keyword key_vault: ARM id of the key vault associated with this workspace. This cannot be - changed once the workspace has been created. - :paramtype key_vault: str + :keyword tags: Dictionary of :code:``. + :paramtype tags: dict[str, str] + :keyword allow_public_access_when_behind_vnet: The flag to indicate whether to allow public + access when behind VNet. + :paramtype allow_public_access_when_behind_vnet: bool :keyword application_insights: ARM id of the application insights associated with this workspace. :paramtype application_insights: str + :keyword associated_workspaces: + :paramtype associated_workspaces: list[str] + :keyword container_registries: + :paramtype container_registries: list[str] :keyword container_registry: ARM id of the container registry associated with this workspace. :paramtype container_registry: str - :keyword storage_account: ARM id of the storage account associated with this workspace. This - cannot be changed once the workspace has been created. - :paramtype storage_account: str + :keyword description: The description of this workspace. + :paramtype description: str :keyword discovery_url: Url for the discovery service to identify regional endpoints for machine learning experimentation services. :paramtype discovery_url: str - :keyword encryption: The encryption settings of Azure ML workspace. 
+ :keyword enable_data_isolation: + :paramtype enable_data_isolation: bool + :keyword encryption: :paramtype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty + :keyword existing_workspaces: + :paramtype existing_workspaces: list[str] + :keyword feature_store_settings: Settings for feature store type workspace. + :paramtype feature_store_settings: + ~azure.mgmt.machinelearningservices.models.FeatureStoreSettings + :keyword friendly_name: The friendly name for this workspace. This name in mutable. + :paramtype friendly_name: str :keyword hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service. :paramtype hbi_workspace: bool + :keyword hub_resource_id: + :paramtype hub_resource_id: str :keyword image_build_compute: The compute name for image build. :paramtype image_build_compute: str - :keyword allow_public_access_when_behind_vnet: The flag to indicate whether to allow public - access when behind VNet. - :paramtype allow_public_access_when_behind_vnet: bool + :keyword key_vault: ARM id of the key vault associated with this workspace. This cannot be + changed once the workspace has been created. + :paramtype key_vault: str + :keyword key_vaults: + :paramtype key_vaults: list[str] + :keyword managed_network: Managed Network settings for a machine learning workspace. + :paramtype managed_network: ~azure.mgmt.machinelearningservices.models.ManagedNetworkSettings + :keyword primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :paramtype primary_user_assigned_identity: str :keyword public_network_access: Whether requests from Public Network are allowed. Known values are: "Enabled" and "Disabled". :paramtype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :keyword service_managed_resources_settings: The service managed resource settings. + :paramtype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings :keyword shared_private_link_resources: The list of shared private link resources in this workspace. :paramtype shared_private_link_resources: list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] - :keyword service_managed_resources_settings: The service managed resource settings. - :paramtype service_managed_resources_settings: - ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings - :keyword primary_user_assigned_identity: The user assigned identity resource id that represents - the workspace identity. - :paramtype primary_user_assigned_identity: str + :keyword soft_delete_retention_in_days: Retention time in days after workspace get soft + deleted. + :paramtype soft_delete_retention_in_days: int + :keyword storage_account: ARM id of the storage account associated with this workspace. This + cannot be changed once the workspace has been created. + :paramtype storage_account: str + :keyword storage_accounts: + :paramtype storage_accounts: list[str] + :keyword system_datastores_auth_mode: The auth mode used for accessing the system datastores of + the workspace. + :paramtype system_datastores_auth_mode: str :keyword v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided by the v2 API. :paramtype v1_legacy_mode: bool + :keyword workspace_hub_config: WorkspaceHub's configuration object. 
+ :paramtype workspace_hub_config: ~azure.mgmt.machinelearningservices.models.WorkspaceHubConfig """ super().__init__(**kwargs) self.identity = identity + self.kind = kind self.location = location - self.tags = tags self.sku = sku - self.workspace_id = None - self.description = description - self.friendly_name = friendly_name - self.key_vault = key_vault + self.tags = tags + self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet self.application_insights = application_insights + self.associated_workspaces = associated_workspaces + self.container_registries = container_registries self.container_registry = container_registry - self.storage_account = storage_account + self.description = description self.discovery_url = discovery_url - self.provisioning_state = None + self.enable_data_isolation = enable_data_isolation self.encryption = encryption + self.existing_workspaces = existing_workspaces + self.feature_store_settings = feature_store_settings + self.friendly_name = friendly_name self.hbi_workspace = hbi_workspace - self.service_provisioned_resource_group = None - self.private_link_count = None + self.hub_resource_id = hub_resource_id self.image_build_compute = image_build_compute - self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet - self.public_network_access = public_network_access - self.private_endpoint_connections = None - self.shared_private_link_resources = shared_private_link_resources + self.key_vault = key_vault + self.key_vaults = key_vaults + self.managed_network = managed_network + self.ml_flow_tracking_uri = None self.notebook_info = None - self.service_managed_resources_settings = service_managed_resources_settings self.primary_user_assigned_identity = primary_user_assigned_identity - self.tenant_id = None + self.private_endpoint_connections = None + self.private_link_count = None + self.provisioning_state = None + self.public_network_access = public_network_access + self.service_managed_resources_settings = service_managed_resources_settings + self.service_provisioned_resource_group = None + self.shared_private_link_resources = shared_private_link_resources + self.soft_delete_retention_in_days = soft_delete_retention_in_days + self.storage_account = storage_account + self.storage_accounts = storage_accounts self.storage_hns_enabled = None - self.ml_flow_tracking_uri = None + self.system_datastores_auth_mode = system_datastores_auth_mode + self.tenant_id = None self.v1_legacy_mode = v1_legacy_mode + self.workspace_hub_config = workspace_hub_config + self.workspace_id = None + + +class WorkspaceConnectionAccessKey(_serialization.Model): + """WorkspaceConnectionAccessKey. + + :ivar access_key_id: + :vartype access_key_id: str + :ivar secret_access_key: + :vartype secret_access_key: str + """ + + _attribute_map = { + "access_key_id": {"key": "accessKeyId", "type": "str"}, + "secret_access_key": {"key": "secretAccessKey", "type": "str"}, + } + + def __init__( + self, *, access_key_id: Optional[str] = None, secret_access_key: Optional[str] = None, **kwargs: Any + ) -> None: + """ + :keyword access_key_id: + :paramtype access_key_id: str + :keyword secret_access_key: + :paramtype secret_access_key: str + """ + super().__init__(**kwargs) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + + +class WorkspaceConnectionApiKey(_serialization.Model): + """Api key object for workspace connection credential. 
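# --- Editor's note (illustrative sketch, not part of the generated diff) -----
# The Workspace model above gains several new keyword arguments in this API
# version (soft_delete_retention_in_days, system_datastores_auth_mode,
# enable_data_isolation, workspace_hub_config, ...). A minimal usage sketch,
# assuming the usual mgmt-plane pattern of a begin_create_or_update
# long-running operation on client.workspaces; resource names and field values
# are placeholders, and WorkspaceHubConfig's kwargs are taken from the model
# defined later in this diff.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices.models import Workspace, WorkspaceHubConfig

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")

workspace = Workspace(
    location="eastus",
    friendly_name="example workspace",
    soft_delete_retention_in_days=30,          # new keyword in this diff
    system_datastores_auth_mode="identity",    # illustrative value only
    enable_data_isolation=False,               # new keyword in this diff
    workspace_hub_config=WorkspaceHubConfig(
        default_workspace_resource_group="<resource-group-arm-id>",
    ),
)

poller = client.workspaces.begin_create_or_update("<resource-group>", "<workspace-name>", workspace)
workspace = poller.result()
# ------------------------------------------------------------------------------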
+ + :ivar key: + :vartype key: str + """ + + _attribute_map = { + "key": {"key": "key", "type": "str"}, + } + + def __init__(self, *, key: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword key: + :paramtype key: str + """ + super().__init__(**kwargs) + self.key = key class WorkspaceConnectionManagedIdentity(_serialization.Model): """WorkspaceConnectionManagedIdentity. - :ivar resource_id: - :vartype resource_id: str :ivar client_id: :vartype client_id: str + :ivar resource_id: + :vartype resource_id: str """ _attribute_map = { - "resource_id": {"key": "resourceId", "type": "str"}, "client_id": {"key": "clientId", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__(self, *, resource_id: Optional[str] = None, client_id: Optional[str] = None, **kwargs: Any) -> None: + def __init__(self, *, client_id: Optional[str] = None, resource_id: Optional[str] = None, **kwargs: Any) -> None: """ - :keyword resource_id: - :paramtype resource_id: str :keyword client_id: :paramtype client_id: str + :keyword resource_id: + :paramtype resource_id: str """ super().__init__(**kwargs) - self.resource_id = resource_id self.client_id = client_id + self.resource_id = resource_id class WorkspaceConnectionPersonalAccessToken(_serialization.Model): @@ -22591,35 +33650,74 @@ def __init__(self, *, properties: "_models.WorkspaceConnectionPropertiesV2", **k class WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult(_serialization.Model): """WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult. - Variables are only populated by the server, and will be ignored when sending a request. - + :ivar next_link: + :vartype next_link: str :ivar value: :vartype value: list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] - :ivar next_link: - :vartype next_link: str """ - _validation = { - "next_link": {"readonly": True}, - } - _attribute_map = { - "value": {"key": "value", "type": "[WorkspaceConnectionPropertiesV2BasicResource]"}, "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[WorkspaceConnectionPropertiesV2BasicResource]"}, } def __init__( - self, *, value: Optional[List["_models.WorkspaceConnectionPropertiesV2BasicResource"]] = None, **kwargs: Any + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.WorkspaceConnectionPropertiesV2BasicResource"]] = None, + **kwargs: Any ) -> None: """ + :keyword next_link: + :paramtype next_link: str :keyword value: :paramtype value: list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] """ super().__init__(**kwargs) + self.next_link = next_link self.value = value - self.next_link = None + + +class WorkspaceConnectionServicePrincipal(_serialization.Model): + """WorkspaceConnectionServicePrincipal. 
+ + :ivar client_id: + :vartype client_id: str + :ivar client_secret: + :vartype client_secret: str + :ivar tenant_id: + :vartype tenant_id: str + """ + + _attribute_map = { + "client_id": {"key": "clientId", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + } + + def __init__( + self, + *, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + tenant_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword client_id: + :paramtype client_id: str + :keyword client_secret: + :paramtype client_secret: str + :keyword tenant_id: + :paramtype tenant_id: str + """ + super().__init__(**kwargs) + self.client_id = client_id + self.client_secret = client_secret + self.tenant_id = tenant_id class WorkspaceConnectionSharedAccessSignature(_serialization.Model): @@ -22642,166 +33740,301 @@ def __init__(self, *, sas: Optional[str] = None, **kwargs: Any) -> None: self.sas = sas +class WorkspaceConnectionUpdateParameter(_serialization.Model): + """The properties that the machine learning workspace connection will be updated with. + + :ivar properties: The properties that the machine learning workspace connection will be updated + with. + :vartype properties: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 + """ + + _attribute_map = { + "properties": {"key": "properties", "type": "WorkspaceConnectionPropertiesV2"}, + } + + def __init__( + self, *, properties: Optional["_models.WorkspaceConnectionPropertiesV2"] = None, **kwargs: Any + ) -> None: + """ + :keyword properties: The properties that the machine learning workspace connection will be + updated with. + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 + """ + super().__init__(**kwargs) + self.properties = properties + + class WorkspaceConnectionUsernamePassword(_serialization.Model): """WorkspaceConnectionUsernamePassword. - :ivar username: - :vartype username: str :ivar password: :vartype password: str + :ivar username: + :vartype username: str """ _attribute_map = { - "username": {"key": "username", "type": "str"}, "password": {"key": "password", "type": "str"}, + "username": {"key": "username", "type": "str"}, } - def __init__(self, *, username: Optional[str] = None, password: Optional[str] = None, **kwargs: Any) -> None: + def __init__(self, *, password: Optional[str] = None, username: Optional[str] = None, **kwargs: Any) -> None: """ - :keyword username: - :paramtype username: str :keyword password: :paramtype password: str + :keyword username: + :paramtype username: str """ super().__init__(**kwargs) - self.username = username self.password = password + self.username = username + + +class WorkspaceHubConfig(_serialization.Model): + """WorkspaceHub's configuration object. 
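# --- Editor's note (illustrative sketch, not part of the generated diff) -----
# The workspace-connection credential models added or reordered above
# (access key, API key, managed identity, service principal, SAS,
# username/password) are plain data holders. A small sketch constructing a few
# of them with exactly the keyword arguments shown in this diff; in practice
# they are carried inside a WorkspaceConnectionPropertiesV2 variant (not shown
# here) when a workspace connection is created or updated. All values are
# placeholders.
from azure.mgmt.machinelearningservices.models import (
    WorkspaceConnectionApiKey,
    WorkspaceConnectionManagedIdentity,
    WorkspaceConnectionServicePrincipal,
)

api_key_credential = WorkspaceConnectionApiKey(key="<api-key>")

managed_identity_credential = WorkspaceConnectionManagedIdentity(
    client_id="<client-id>",
    resource_id="<user-assigned-identity-arm-id>",
)

service_principal_credential = WorkspaceConnectionServicePrincipal(
    client_id="<client-id>",
    client_secret="<client-secret>",
    tenant_id="<tenant-id>",
)
# ------------------------------------------------------------------------------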
+ + :ivar additional_workspace_storage_accounts: + :vartype additional_workspace_storage_accounts: list[str] + :ivar default_workspace_resource_group: + :vartype default_workspace_resource_group: str + """ + + _attribute_map = { + "additional_workspace_storage_accounts": {"key": "additionalWorkspaceStorageAccounts", "type": "[str]"}, + "default_workspace_resource_group": {"key": "defaultWorkspaceResourceGroup", "type": "str"}, + } + + def __init__( + self, + *, + additional_workspace_storage_accounts: Optional[List[str]] = None, + default_workspace_resource_group: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_workspace_storage_accounts: + :paramtype additional_workspace_storage_accounts: list[str] + :keyword default_workspace_resource_group: + :paramtype default_workspace_resource_group: str + """ + super().__init__(**kwargs) + self.additional_workspace_storage_accounts = additional_workspace_storage_accounts + self.default_workspace_resource_group = default_workspace_resource_group class WorkspaceListResult(_serialization.Model): """The result of a request to list machine learning workspaces. + :ivar next_link: The link to the next page constructed using the continuationToken. If null, + there are no additional pages. + :vartype next_link: str :ivar value: The list of machine learning workspaces. Since this list may be incomplete, the nextLink field should be used to request the next list of machine learning workspaces. :vartype value: list[~azure.mgmt.machinelearningservices.models.Workspace] - :ivar next_link: The URI that can be used to request the next list of machine learning - workspaces. - :vartype next_link: str """ _attribute_map = { - "value": {"key": "value", "type": "[Workspace]"}, "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Workspace]"}, } def __init__( - self, *, value: Optional[List["_models.Workspace"]] = None, next_link: Optional[str] = None, **kwargs: Any + self, *, next_link: Optional[str] = None, value: Optional[List["_models.Workspace"]] = None, **kwargs: Any ) -> None: """ + :keyword next_link: The link to the next page constructed using the continuationToken. If + null, there are no additional pages. + :paramtype next_link: str :keyword value: The list of machine learning workspaces. Since this list may be incomplete, the nextLink field should be used to request the next list of machine learning workspaces. :paramtype value: list[~azure.mgmt.machinelearningservices.models.Workspace] - :keyword next_link: The URI that can be used to request the next list of machine learning - workspaces. - :paramtype next_link: str """ super().__init__(**kwargs) - self.value = value self.next_link = next_link + self.value = value + + +class WorkspacePrivateEndpointResource(_serialization.Model): + """The Private Endpoint resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: e.g. + /subscriptions/{networkSubscriptionId}/resourceGroups/{rgName}/providers/Microsoft.Network/privateEndpoints/{privateEndpointName}. + :vartype id: str + :ivar subnet_arm_id: The subnetId that the private endpoint is connected to. 
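# --- Editor's note (illustrative sketch, not part of the generated diff) -----
# WorkspaceListResult above is the wire shape for paged workspace listings
# (value + nextLink). Callers normally never touch it directly: the operation
# group returns an iterator that follows nextLink automatically. A minimal
# sketch, assuming the workspaces operation group exposes list_by_resource_group
# and using placeholder names.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")

for ws in client.workspaces.list_by_resource_group("<resource-group>"):
    # Each item is a Workspace; paging across nextLink is handled by the SDK.
    print(ws.name, ws.location)
# ------------------------------------------------------------------------------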
+ :vartype subnet_arm_id: str + """ + + _validation = { + "id": {"readonly": True}, + "subnet_arm_id": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "subnet_arm_id": {"key": "subnetArmId", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.id = None + self.subnet_arm_id = None class WorkspaceUpdateParameters(_serialization.Model): # pylint: disable=too-many-instance-attributes """The parameters for updating a machine learning workspace. + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar sku: Optional. This field is required to be implemented by the RP because AML is + supporting more than one tier. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku :ivar tags: The resource tags for the machine learning workspace. :vartype tags: dict[str, str] - :ivar sku: The sku of the workspace. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar application_insights: ARM id of the application insights associated with this workspace. + :vartype application_insights: str + :ivar container_registry: ARM id of the container registry associated with this workspace. + :vartype container_registry: str :ivar description: The description of this workspace. :vartype description: str - :ivar friendly_name: The friendly name for this workspace. + :ivar enable_data_isolation: + :vartype enable_data_isolation: bool + :ivar encryption: + :vartype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionUpdateProperties + :ivar feature_store_settings: Settings for feature store type workspace. + :vartype feature_store_settings: + ~azure.mgmt.machinelearningservices.models.FeatureStoreSettings + :ivar friendly_name: The friendly name for this workspace. This name in mutable. :vartype friendly_name: str :ivar image_build_compute: The compute name for image build. :vartype image_build_compute: str - :ivar service_managed_resources_settings: The service managed resource settings. - :vartype service_managed_resources_settings: - ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :ivar managed_network: Managed Network settings for a machine learning workspace. + :vartype managed_network: ~azure.mgmt.machinelearningservices.models.ManagedNetworkSettings :ivar primary_user_assigned_identity: The user assigned identity resource id that represents the workspace identity. :vartype primary_user_assigned_identity: str :ivar public_network_access: Whether requests from Public Network are allowed. Known values are: "Enabled" and "Disabled". :vartype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess - :ivar application_insights: ARM id of the application insights associated with this workspace. - :vartype application_insights: str - :ivar container_registry: ARM id of the container registry associated with this workspace. - :vartype container_registry: str + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :ivar service_managed_resources_settings: The service managed resource settings. 
+ :vartype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :ivar soft_delete_retention_in_days: Retention time in days after workspace get soft deleted. + :vartype soft_delete_retention_in_days: int + :ivar v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided by + the v2 API. + :vartype v1_legacy_mode: bool """ _attribute_map = { - "tags": {"key": "tags", "type": "{str}"}, - "sku": {"key": "sku", "type": "Sku"}, "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, + "application_insights": {"key": "properties.applicationInsights", "type": "str"}, + "container_registry": {"key": "properties.containerRegistry", "type": "str"}, "description": {"key": "properties.description", "type": "str"}, + "enable_data_isolation": {"key": "properties.enableDataIsolation", "type": "bool"}, + "encryption": {"key": "properties.encryption", "type": "EncryptionUpdateProperties"}, + "feature_store_settings": {"key": "properties.featureStoreSettings", "type": "FeatureStoreSettings"}, "friendly_name": {"key": "properties.friendlyName", "type": "str"}, "image_build_compute": {"key": "properties.imageBuildCompute", "type": "str"}, + "managed_network": {"key": "properties.managedNetwork", "type": "ManagedNetworkSettings"}, + "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, "service_managed_resources_settings": { "key": "properties.serviceManagedResourcesSettings", "type": "ServiceManagedResourcesSettings", }, - "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, - "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, - "application_insights": {"key": "properties.applicationInsights", "type": "str"}, - "container_registry": {"key": "properties.containerRegistry", "type": "str"}, + "soft_delete_retention_in_days": {"key": "properties.softDeleteRetentionInDays", "type": "int"}, + "v1_legacy_mode": {"key": "properties.v1LegacyMode", "type": "bool"}, } def __init__( self, *, - tags: Optional[Dict[str, str]] = None, - sku: Optional["_models.Sku"] = None, identity: Optional["_models.ManagedServiceIdentity"] = None, + sku: Optional["_models.Sku"] = None, + tags: Optional[Dict[str, str]] = None, + application_insights: Optional[str] = None, + container_registry: Optional[str] = None, description: Optional[str] = None, + enable_data_isolation: Optional[bool] = None, + encryption: Optional["_models.EncryptionUpdateProperties"] = None, + feature_store_settings: Optional["_models.FeatureStoreSettings"] = None, friendly_name: Optional[str] = None, image_build_compute: Optional[str] = None, - service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, + managed_network: Optional["_models.ManagedNetworkSettings"] = None, primary_user_assigned_identity: Optional[str] = None, - public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, - application_insights: Optional[str] = None, - container_registry: Optional[str] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccessType"]] = None, + service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, + soft_delete_retention_in_days: Optional[int] = None, + 
v1_legacy_mode: Optional[bool] = None, **kwargs: Any ) -> None: """ + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword sku: Optional. This field is required to be implemented by the RP because AML is + supporting more than one tier. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku :keyword tags: The resource tags for the machine learning workspace. :paramtype tags: dict[str, str] - :keyword sku: The sku of the workspace. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword identity: The identity of the resource. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword application_insights: ARM id of the application insights associated with this + workspace. + :paramtype application_insights: str + :keyword container_registry: ARM id of the container registry associated with this workspace. + :paramtype container_registry: str :keyword description: The description of this workspace. :paramtype description: str - :keyword friendly_name: The friendly name for this workspace. + :keyword enable_data_isolation: + :paramtype enable_data_isolation: bool + :keyword encryption: + :paramtype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionUpdateProperties + :keyword feature_store_settings: Settings for feature store type workspace. + :paramtype feature_store_settings: + ~azure.mgmt.machinelearningservices.models.FeatureStoreSettings + :keyword friendly_name: The friendly name for this workspace. This name in mutable. :paramtype friendly_name: str :keyword image_build_compute: The compute name for image build. :paramtype image_build_compute: str - :keyword service_managed_resources_settings: The service managed resource settings. - :paramtype service_managed_resources_settings: - ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :keyword managed_network: Managed Network settings for a machine learning workspace. + :paramtype managed_network: ~azure.mgmt.machinelearningservices.models.ManagedNetworkSettings :keyword primary_user_assigned_identity: The user assigned identity resource id that represents the workspace identity. :paramtype primary_user_assigned_identity: str :keyword public_network_access: Whether requests from Public Network are allowed. Known values are: "Enabled" and "Disabled". :paramtype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess - :keyword application_insights: ARM id of the application insights associated with this - workspace. - :paramtype application_insights: str - :keyword container_registry: ARM id of the container registry associated with this workspace. - :paramtype container_registry: str + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :keyword service_managed_resources_settings: The service managed resource settings. + :paramtype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :keyword soft_delete_retention_in_days: Retention time in days after workspace get soft + deleted. + :paramtype soft_delete_retention_in_days: int + :keyword v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided + by the v2 API. 
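# --- Editor's note (illustrative sketch, not part of the generated diff) -----
# A minimal sketch of a partial workspace update using the reshaped
# WorkspaceUpdateParameters above. It assumes the workspaces operation group
# exposes a begin_update long-running operation accepting these parameters
# (the exact method name may differ by package version); field values are
# illustrative only, and enable_data_isolation / soft_delete_retention_in_days
# are the keywords newly added in this diff.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices.models import WorkspaceUpdateParameters

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")

update = WorkspaceUpdateParameters(
    description="updated description",
    public_network_access="Disabled",      # known values: "Enabled", "Disabled"
    enable_data_isolation=True,
    soft_delete_retention_in_days=14,
)

workspace = client.workspaces.begin_update("<resource-group>", "<workspace-name>", update).result()
# ------------------------------------------------------------------------------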
+ :paramtype v1_legacy_mode: bool """ super().__init__(**kwargs) - self.tags = tags - self.sku = sku self.identity = identity + self.sku = sku + self.tags = tags + self.application_insights = application_insights + self.container_registry = container_registry self.description = description + self.enable_data_isolation = enable_data_isolation + self.encryption = encryption + self.feature_store_settings = feature_store_settings self.friendly_name = friendly_name self.image_build_compute = image_build_compute - self.service_managed_resources_settings = service_managed_resources_settings + self.managed_network = managed_network self.primary_user_assigned_identity = primary_user_assigned_identity self.public_network_access = public_network_access - self.application_insights = application_insights - self.container_registry = container_registry + self.service_managed_resources_settings = service_managed_resources_settings + self.soft_delete_retention_in_days = soft_delete_retention_in_days + self.v1_legacy_mode = v1_legacy_mode diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py index 4967e3af6930..95c8e68c6e97 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._operations import Operations -from ._workspaces_operations import WorkspacesOperations from ._usages_operations import UsagesOperations from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations from ._quotas_operations import QuotasOperations from ._compute_operations import ComputeOperations -from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations -from ._private_link_resources_operations import PrivateLinkResourcesOperations -from ._workspace_connections_operations import WorkspaceConnectionsOperations from ._registry_code_containers_operations import RegistryCodeContainersOperations from ._registry_code_versions_operations import RegistryCodeVersionsOperations from ._registry_component_containers_operations import RegistryComponentContainersOperations @@ -36,29 +31,41 @@ from ._datastores_operations import DatastoresOperations from ._environment_containers_operations import EnvironmentContainersOperations from ._environment_versions_operations import EnvironmentVersionsOperations +from ._featureset_containers_operations import FeaturesetContainersOperations +from ._features_operations import FeaturesOperations +from ._featureset_versions_operations import FeaturesetVersionsOperations +from ._featurestore_entity_containers_operations import FeaturestoreEntityContainersOperations +from ._featurestore_entity_versions_operations import FeaturestoreEntityVersionsOperations +from ._inference_pools_operations import InferencePoolsOperations +from ._inference_groups_operations import InferenceGroupsOperations +from ._inference_endpoints_operations import InferenceEndpointsOperations from ._jobs_operations import JobsOperations +from ._labeling_jobs_operations import LabelingJobsOperations from ._model_containers_operations import 
ModelContainersOperations from ._model_versions_operations import ModelVersionsOperations from ._online_endpoints_operations import OnlineEndpointsOperations from ._online_deployments_operations import OnlineDeploymentsOperations from ._schedules_operations import SchedulesOperations +from ._serverless_endpoints_operations import ServerlessEndpointsOperations from ._registries_operations import RegistriesOperations from ._workspace_features_operations import WorkspaceFeaturesOperations +from ._operations import Operations +from ._workspaces_operations import WorkspacesOperations +from ._workspace_connections_operations import WorkspaceConnectionsOperations +from ._managed_network_settings_rule_operations import ManagedNetworkSettingsRuleOperations +from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._managed_network_provisions_operations import ManagedNetworkProvisionsOperations from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk __all__ = [ - "Operations", - "WorkspacesOperations", "UsagesOperations", "VirtualMachineSizesOperations", "QuotasOperations", "ComputeOperations", - "PrivateEndpointConnectionsOperations", - "PrivateLinkResourcesOperations", - "WorkspaceConnectionsOperations", "RegistryCodeContainersOperations", "RegistryCodeVersionsOperations", "RegistryComponentContainersOperations", @@ -80,14 +87,31 @@ "DatastoresOperations", "EnvironmentContainersOperations", "EnvironmentVersionsOperations", + "FeaturesetContainersOperations", + "FeaturesOperations", + "FeaturesetVersionsOperations", + "FeaturestoreEntityContainersOperations", + "FeaturestoreEntityVersionsOperations", + "InferencePoolsOperations", + "InferenceGroupsOperations", + "InferenceEndpointsOperations", "JobsOperations", + "LabelingJobsOperations", "ModelContainersOperations", "ModelVersionsOperations", "OnlineEndpointsOperations", "OnlineDeploymentsOperations", "SchedulesOperations", + "ServerlessEndpointsOperations", "RegistriesOperations", "WorkspaceFeaturesOperations", + "Operations", + "WorkspacesOperations", + "WorkspaceConnectionsOperations", + "ManagedNetworkSettingsRuleOperations", + "PrivateEndpointConnectionsOperations", + "PrivateLinkResourcesOperations", + "ManagedNetworkProvisionsOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py index 7fcc2d9cfc00..188d3c484b29 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py @@ -30,7 +30,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -53,7 +53,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,7 +72,7 @@ def build_list_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -100,7 +100,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -120,7 +120,7 @@ def build_delete_request( "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -142,7 +142,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -162,7 +162,7 @@ def build_get_request( "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -184,7 +184,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -209,7 +209,7 @@ def build_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -233,7 +233,7 @@ def build_create_or_update_request( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -258,7 +258,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py index afa2aebfed20..b8b914a193a4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py @@ -30,7 +30,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +51,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +69,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -90,7 +90,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -109,7 +109,7 @@ def build_delete_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -126,7 +126,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = 
_headers.pop("Accept", "application/json") # Construct URL @@ -145,7 +145,7 @@ def build_get_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -162,7 +162,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -184,7 +184,7 @@ def build_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -203,7 +203,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -225,7 +225,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -244,7 +244,7 @@ def build_list_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -263,7 +263,7 @@ def build_list_keys_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py index bd3a7ea7d400..9c6b9fa84aaa 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py @@ -28,7 +28,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -43,7 +43,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -61,7 +61,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -80,7 +80,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -99,7 +99,7 @@ def build_delete_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -116,7 +116,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -135,7 +135,7 @@ def build_get_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -152,7 +152,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -172,7 +172,7 @@ def build_create_or_update_request( "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py index 85aa291231f0..ca2653f0bf77 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py @@ -28,7 +28,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -53,7 +53,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,7 +72,7 @@ def build_list_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -99,7 +99,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -119,7 +119,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -136,7 +136,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -156,7 +156,7 @@ def build_get_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -173,7 +173,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -194,7 +194,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -213,7 +213,7 @@ def build_create_or_get_start_pending_upload_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -234,7 +234,7 @@ def build_create_or_get_start_pending_upload_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py index 0bc38057f344..3e3abda2fb72 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py @@ -28,7 +28,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -49,7 +49,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -67,7 +67,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -88,7 +88,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -107,7 +107,7 @@ def build_delete_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -124,7 +124,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -143,7 +143,7 @@ def build_get_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -160,7 +160,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -180,7 +180,7 @@ def build_create_or_update_request( "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py index 932e842e43de..4c2c245686e4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py @@ -28,7 +28,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,12 +47,13 @@ def build_list_request( top: Optional[int] = None, skip: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -71,7 +72,7 @@ def build_list_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -83,6 +84,8 @@ def build_list_request( _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if stage is not None: + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -96,7 +99,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -116,7 +119,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -133,7 +136,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -153,7 +156,7 @@ def build_get_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: 
str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -170,7 +173,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -191,7 +194,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -233,6 +236,7 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> Iterable["_models.ComponentVersion"]: """List component versions. @@ -255,6 +259,8 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: Component stage. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ComponentVersion or the result of cls(response) :rtype: @@ -287,6 +293,7 @@ def prepare_request(next_link=None): top=top, skip=skip, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py index 349245e851f3..f97c30107af4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -30,7 +30,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +45,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -63,7 +63,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -82,7 +82,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -98,10 +98,12 @@ def build_get_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -118,7 +120,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -135,10 +137,12 @@ def build_create_or_update_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -157,7 +161,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -174,10 +178,12 @@ def build_update_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -202,7 +208,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -218,10 +224,12 @@ def build_delete_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -235,13 +243,54 @@ def build_delete_request( return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) +def build_update_custom_services_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if 
content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + def build_list_nodes_request( resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -257,10 +306,12 @@ def build_list_nodes_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -277,7 +328,7 @@ def build_list_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -293,10 +344,12 @@ def build_list_keys_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -313,7 +366,7 @@ def build_start_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -329,10 +382,12 @@ def build_start_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") @@ -349,7 +404,7 @@ def build_stop_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -365,10 +420,12 @@ def build_stop_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -385,7 +442,7 @@ def build_restart_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -401,21 +458,143 @@ def build_restart_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "computeName": _SERIALIZER.url("compute_name", compute_name, "str"), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_idle_shutdown_setting_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", 
pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_allowed_resize_sizes_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/getAllowedVmSizesForResize", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_resize_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url: str = _format_url_section(_url, 
**path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -class ComputeOperations: +class ComputeOperations: # pylint: disable=too-many-public-methods """ .. warning:: **DO NOT** instantiate this class directly. @@ -1233,11 +1412,18 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" } - @distributed_trace - def list_nodes( - self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any - ) -> Iterable["_models.AmlComputeNodeInformation"]: - """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. + @overload + def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: List[_models.CustomService], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -1246,19 +1432,77 @@ def list_nodes( :type workspace_name: str :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str + :param custom_services: New list of Custom Services. Required. + :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AmlComputeNodeInformation or the result of - cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] + :return: None or the result of cls(response) + :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.AmlComputeNodesInformation] = kwargs.pop("cls", None) + @overload + def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Required. + :type custom_services: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: Union[List[_models.CustomService], IO], + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Is either a [CustomService] type or a IO + type. Required. + :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1267,41 +1511,125 @@ def list_nodes( } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - request = build_list_nodes_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list_nodes.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + content_type = content_type or 
"application/json" + _json = None + _content = None + if isinstance(custom_services, (IOBase, bytes)): + _content = custom_services + else: + _json = self._serialize.body(custom_services, "[CustomService]") - def extract_data(pipeline_response): + request = build_update_custom_services_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update_custom_services.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_custom_services.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices" + } + + @distributed_trace + def list_nodes( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> Iterable["_models.AmlComputeNodeInformation"]: + """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AmlComputeNodeInformation or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.AmlComputeNodesInformation] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_nodes_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_nodes.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): deserialized = self._deserialize("AmlComputeNodesInformation", pipeline_response) list_of_elem = deserialized.nodes if cls: @@ -1751,3 +2079,452 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- begin_restart.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart" } + + @overload + def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.IdleShutdownSetting, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.IdleShutdownSetting, IO], + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Is either a IdleShutdownSetting type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
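A hedged sketch of the new update_idle_shutdown_setting operation; the idle_time_before_shutdown field and its ISO 8601 duration value are assumptions, not confirmed by this diff:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices.models import IdleShutdownSetting

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")

# Shut the compute instance down after 30 idle minutes (field and duration format assumed).
client.compute.update_idle_shutdown_setting(
    resource_group_name="my-rg",              # placeholder
    workspace_name="my-workspace",            # placeholder
    compute_name="my-compute-instance",       # placeholder
    parameters=IdleShutdownSetting(idle_time_before_shutdown="PT30M"),
)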
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "IdleShutdownSetting") + + request = build_update_idle_shutdown_setting_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update_idle_shutdown_setting.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_idle_shutdown_setting.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting" + } + + @distributed_trace + def get_allowed_resize_sizes( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.VirtualMachineSizeListResult: + """Returns supported virtual machine sizes for resize. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: VirtualMachineSizeListResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.VirtualMachineSizeListResult] = kwargs.pop("cls", None) + + request = build_get_allowed_resize_sizes_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_allowed_resize_sizes.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("VirtualMachineSizeListResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_allowed_resize_sizes.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/getAllowedVmSizesForResize" + } + + def _resize_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ResizeSchema, IO], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ResizeSchema") + + request = build_resize_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._resize_initial.metadata["url"], + 
headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _resize_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize" + } + + @overload + def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ResizeSchema, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. + Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ResizeSchema + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. + Required. 
+ :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ResizeSchema, IO], + **kwargs: Any + ) -> LROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. Is + either a ResizeSchema type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ResizeSchema or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
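A hedged sketch pairing the new get_allowed_resize_sizes and begin_resize operations; the ResizeSchema field name, the .value attribute on the size list result, and the VM size string are assumptions, not confirmed by this diff:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices.models import ResizeSchema

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")

# Ask which VM sizes the compute instance can be resized to.
allowed = client.compute.get_allowed_resize_sizes(
    resource_group_name="my-rg",              # placeholder
    workspace_name="my-workspace",            # placeholder
    compute_name="my-compute-instance",       # placeholder
)
print([size.name for size in allowed.value])  # assumes VirtualMachineSizeListResult.value

# Resize is a long-running operation that returns no body; block until it finishes.
poller = client.compute.begin_resize(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    compute_name="my-compute-instance",
    parameters=ResizeSchema(target_vm_size="Standard_DS3_v2"),  # field and size assumed
)
poller.result()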
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._resize_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_resize.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py index 48c5dee07f2c..db399057899d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py @@ -28,7 +28,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -49,7 +49,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -67,7 +67,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -88,7 +88,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -107,7 +107,7 @@ def build_delete_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -124,7 +124,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -143,7 +143,7 @@ def build_get_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -160,7 +160,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -180,7 +180,7 @@ def build_create_or_update_request( "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py index e355736e9350..8fdd1c819812 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py @@ -28,7 +28,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -48,12 +48,13 @@ def build_list_request( skip: Optional[str] = None, tags: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,7 +73,7 @@ def build_list_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -86,6 +87,8 @@ def build_list_request( _params["$tags"] = _SERIALIZER.query("tags", tags, "str") if list_view_type is not None: _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if stage is not None: + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -99,7 +102,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -119,7 +122,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -136,7 +139,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -156,7 +159,7 @@ def build_get_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = 
_format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -173,7 +176,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -194,7 +197,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -237,6 +240,7 @@ def list( skip: Optional[str] = None, tags: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> Iterable["_models.DataVersionBase"]: """List data versions in the data container. @@ -266,6 +270,8 @@ def list( ListViewType.All]View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: data stage. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataVersionBase or the result of cls(response) :rtype: @@ -299,6 +305,7 @@ def prepare_request(next_link=None): skip=skip, tags=tags, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py index 651ac883ae0f..6c36aea5efd2 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py @@ -28,7 +28,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -54,7 +54,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,7 +72,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -103,7 +103,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -122,7 +122,7 @@ def build_delete_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -139,7 +139,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -158,7 +158,7 @@ def build_get_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -181,7 +181,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -201,7 +201,7 @@ def build_create_or_update_request( "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -222,7 +222,7 @@ def build_list_secrets_request( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -241,7 +241,7 @@ def build_list_secrets_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py index d65ee4225ec4..d87d9b92165c 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py @@ -28,7 +28,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -49,7 +49,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -67,7 +67,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -88,7 +88,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -107,7 +107,7 @@ def build_delete_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -124,7 +124,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # 
Construct URL @@ -143,7 +143,7 @@ def build_get_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -160,7 +160,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -180,7 +180,7 @@ def build_create_or_update_request( "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py index fc163b0b3bb7..1b5a08843c57 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py @@ -28,7 +28,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,12 +47,13 @@ def build_list_request( top: Optional[int] = None, skip: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -71,7 +72,7 @@ def build_list_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -83,6 +84,8 @@ def build_list_request( _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if stage is not None: + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -96,7 +99,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -116,7 +119,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -133,7 +136,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -153,7 +156,7 @@ def build_get_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -170,7 +173,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) 
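# Editor's note (illustrative sketch, not part of the generated diff): the
# 2023-08-01-preview API adds an optional `stage` query filter to the data-version
# and environment-version list operations changed in this file. A minimal usage
# sketch through the management client follows; the `environment_versions`
# attribute name and the "Production" stage value are assumptions for illustration.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")
for version in client.environment_versions.list(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="my-environment",
    stage="Production",  # new filter in 2023-08-01-preview; takes priority over listViewType
):
    print(version.name)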
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -191,7 +194,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -233,6 +236,7 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> Iterable["_models.EnvironmentVersion"]: """List versions. @@ -255,6 +259,9 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: Stage for including/excluding (for example) archived entities. Takes priority + over listViewType. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) :rtype: @@ -287,6 +294,7 @@ def prepare_request(next_link=None): top=top, skip=skip, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_features_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_features_operations.py new file mode 100644 index 000000000000..c2851df51c14 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_features_operations.py @@ -0,0 +1,381 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + feature_name: Optional[str] = None, + description: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 1000, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "featuresetName": _SERIALIZER.url( + "featureset_name", featureset_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "featuresetVersion": _SERIALIZER.url("featureset_version", featureset_version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if feature_name is not None: + _params["featureName"] = _SERIALIZER.query("feature_name", feature_name, "str") + if description is not None: + _params["description"] = _SERIALIZER.query("description", description, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if page_size is not None: + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + feature_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features/{featureName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "featuresetName": _SERIALIZER.url( + "featureset_name", featureset_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "featuresetVersion": _SERIALIZER.url("featureset_version", featureset_version, "str"), + "featureName": _SERIALIZER.url( + "feature_name", feature_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class FeaturesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`features` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + feature_name: Optional[str] = None, + description: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 1000, + **kwargs: Any + ) -> Iterable["_models.Feature"]: + """List Features. + + List Features. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param featureset_name: Featureset name. This is case-sensitive. Required. + :type featureset_name: str + :param featureset_version: Featureset Version identifier. This is case-sensitive. Required. + :type featureset_version: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param feature_name: feature name. Default value is None. + :type feature_name: str + :param description: Description of the featureset. Default value is None. + :type description: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. 
Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: Page size. Default value is 1000. + :type page_size: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Feature or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Feature] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeatureResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + featureset_name=featureset_name, + featureset_version=featureset_version, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + feature_name=feature_name, + description=description, + list_view_type=list_view_type, + page_size=page_size, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("FeatureResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features" + } + + @distributed_trace + def 
get( + self, + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + feature_name: str, + **kwargs: Any + ) -> _models.Feature: + """Get feature. + + Get feature. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param featureset_name: Feature set name. This is case-sensitive. Required. + :type featureset_name: str + :param featureset_version: Feature set version identifier. This is case-sensitive. Required. + :type featureset_version: str + :param feature_name: Feature Name. This is case-sensitive. Required. + :type feature_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Feature or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Feature + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Feature] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + featureset_name=featureset_name, + featureset_version=featureset_version, + feature_name=feature_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("Feature", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features/{featureName}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featureset_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featureset_containers_operations.py new file mode 100644 index 000000000000..23e64fa6c217 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featureset_containers_operations.py @@ -0,0 +1,814 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- 
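# Editor's note (illustrative sketch, not generated code): the new FeaturesOperations
# defined above is reachable through the client's `features` attribute and exposes
# `list` (paged, with optional filters) and `get`. Resource, featureset, and feature
# names below are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")
for feature in client.features.list(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    featureset_name="transactions",
    featureset_version="1",
    list_view_type="ActiveOnly",  # or an ~azure.mgmt.machinelearningservices.models.ListViewType value
    page_size=100,
):
    print(feature.name)

feature = client.features.get(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    featureset_name="transactions",
    featureset_version="1",
    feature_name="transaction_amount",
)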
+# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if page_size is not None: + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") + if name is not None: + _params["name"] = _SERIALIZER.query("name", name, "str") + if description is not None: + _params["description"] = _SERIALIZER.query("description", 
description, "str") + if created_by is not None: + _params["createdBy"] = _SERIALIZER.query("created_by", created_by, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_entity_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) 
+ + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class FeaturesetContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`featureset_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.FeaturesetContainer"]: + """List featurestore entity containers. + + List featurestore entity containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: page size. Default value is 20. 
+ :type page_size: int + :param name: name for the featureset. Default value is None. + :type name: str + :param description: description for the feature set. Default value is None. + :type description: str + :param created_by: createdBy user name. Default value is None. + :type created_by: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FeaturesetContainer or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + list_view_type=list_view_type, + page_size=page_size, + name=name, + description=description, + created_by=created_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("FeaturesetContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets" + } + + def _delete_initial( # pylint: 
disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> LROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
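# Editor's note (illustrative sketch): begin_delete on the new featureset_containers
# operations group returns an LROPoller; calling .result() blocks until the service
# finishes deleting the container. Names below are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")
poller = client.featureset_containers.begin_delete(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="transactions",
)
poller.result()  # returns None on success; raises HttpResponseError on failure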
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + @distributed_trace + def get_entity( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.FeaturesetContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FeaturesetContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) + + request = build_get_entity_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_entity.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_entity.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturesetContainer, IO], + **kwargs: Any + ) -> _models.FeaturesetContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + 
_stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.FeaturesetContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetContainer]: + """Create or update container. + + Create or update container. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturesetContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturesetContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Is either a FeaturesetContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
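# Editor's note (illustrative sketch): begin_create_or_update accepts either a
# FeaturesetContainer model or a JSON stream (IO) as the body and returns an
# LROPoller over the created/updated container. The FeaturesetContainerProperties
# model and its `description` field follow the usual ARM pattern for this SDK and
# are assumptions, not taken from this diff.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices.models import (
    FeaturesetContainer,
    FeaturesetContainerProperties,  # assumed properties model
)

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")
container = client.featureset_containers.begin_create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="transactions",
    body=FeaturesetContainer(
        properties=FeaturesetContainerProperties(description="Transaction features")
    ),
).result()
print(container.name)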
+ :return: An instance of LROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featureset_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featureset_versions_operations.py new file mode 100644 index 000000000000..96716fd615a8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featureset_versions_operations.py @@ -0,0 +1,1162 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if page_size is not None: + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") + if version_name is not None: + _params["versionName"] = _SERIALIZER.query("version_name", version_name, "str") + if version is not None: + _params["version"] = _SERIALIZER.query("version", version, "str") + if description is not None: + _params["description"] = _SERIALIZER.query("description", description, "str") + if created_by 
is not None: + _params["createdBy"] = _SERIALIZER.query("created_by", created_by, "str") + if stage is not None: + _params["stage"] = _SERIALIZER.query("stage", stage, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: 
str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_backfill_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, 
**kwargs) + + +class FeaturesetVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`featureset_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.FeaturesetVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Featureset name. This is case-sensitive. Required. + :type name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: page size. Default value is 20. + :type page_size: int + :param version_name: name for the featureset version. Default value is None. + :type version_name: str + :param version: featureset version. Default value is None. + :type version: str + :param description: description for the feature set version. Default value is None. + :type description: str + :param created_by: createdBy user name. Default value is None. + :type created_by: str + :param stage: Specifies the featurestore stage. Default value is None. 
+ :type stage: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FeaturesetVersion or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + list_view_type=list_view_type, + page_size=page_size, + version_name=version_name, + version=version, + description=description, + created_by=created_by, + stage=stage, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("FeaturesetVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions" + } + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, 
+ 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.FeaturesetVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FeaturesetVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersion, IO], + **kwargs: Any + ) -> _models.FeaturesetVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturesetVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Is either a FeaturesetVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + def _backfill_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersionBackfillRequest, IO], + **kwargs: Any + ) -> Optional[_models.FeaturesetVersionBackfillResponse]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.FeaturesetVersionBackfillResponse]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, 
"FeaturesetVersionBackfillRequest") + + request = build_backfill_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._backfill_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturesetVersionBackfillResponse", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _backfill_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill" + } + + @overload + def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturesetVersionBackfillRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either FeaturesetVersionBackfillResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetVersionBackfillResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersionBackfillRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Is either a + FeaturesetVersionBackfillRequest type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetVersionBackfillResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersionBackfillResponse] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._backfill_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturesetVersionBackfillResponse", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_backfill.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featurestore_entity_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featurestore_entity_containers_operations.py new file mode 100644 index 000000000000..e65238c3984e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featurestore_entity_containers_operations.py @@ -0,0 +1,815 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if page_size is not None: + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") + if name is not None: + _params["name"] = _SERIALIZER.query("name", name, "str") + if description is not None: + _params["description"] = _SERIALIZER.query("description", description, "str") + if created_by is not None: + _params["createdBy"] = _SERIALIZER.query("created_by", created_by, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + 
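+    # This helper only builds the HttpRequest; FeaturestoreEntityContainersOperations.list sends it
+    # through the client's ARM pipeline and wraps the paged responses in an ItemPaged iterator.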
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_entity_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + 
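+    # Content-Type is only set on the request when the caller supplies one; the generated
+    # *_create_or_update_initial operations default it to "application/json" before serializing a
+    # model body (see FeaturesetVersionsOperations._create_or_update_initial earlier in this diff).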
accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class FeaturestoreEntityContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`featurestore_entity_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.FeaturestoreEntityContainer"]: + """List featurestore entity containers. + + List featurestore entity containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: page size. Default value is 20. + :type page_size: int + :param name: name for the featurestore entity. Default value is None. + :type name: str + :param description: description for the featurestore entity. 
Default value is None. + :type description: str + :param created_by: createdBy user name. Default value is None. + :type created_by: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FeaturestoreEntityContainer or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + list_view_type=list_view_type, + page_size=page_size, + name=name, + description=description, + created_by=created_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("FeaturestoreEntityContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities" + } + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + 
error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> LROPoller[None]: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + @distributed_trace + def get_entity( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.FeaturestoreEntityContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FeaturestoreEntityContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) + + request = build_get_entity_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_entity.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_entity.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturestoreEntityContainer, IO], + **kwargs: Any + ) -> _models.FeaturestoreEntityContainer: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturestoreEntityContainer") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.FeaturestoreEntityContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either FeaturestoreEntityContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturestoreEntityContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturestoreEntityContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Is either a FeaturestoreEntityContainer type + or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturestoreEntityContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featurestore_entity_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featurestore_entity_versions_operations.py new file mode 100644 index 000000000000..844aa52e4f70 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_featurestore_entity_versions_operations.py @@ -0,0 +1,859 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if list_view_type is not None: + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if page_size is not None: + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") + if version_name is not None: + _params["versionName"] = _SERIALIZER.query("version_name", version_name, "str") + if version is not None: + _params["version"] = _SERIALIZER.query("version", version, "str") + if description is not None: + _params["description"] = _SERIALIZER.query("description", description, "str") + if 
created_by is not None: + _params["createdBy"] = _SERIALIZER.query("created_by", created_by, "str") + if stage is not None: + _params["stage"] = _SERIALIZER.query("stage", stage, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class FeaturestoreEntityVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`featurestore_entity_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.FeaturestoreEntityVersion"]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Feature entity name. This is case-sensitive. Required. + :type name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param page_size: page size. Default value is 20. + :type page_size: int + :param version_name: name for the featurestore entity version. Default value is None. + :type version_name: str + :param version: featurestore entity version. Default value is None. + :type version: str + :param description: description for the feature entity version. Default value is None. + :type description: str + :param created_by: createdBy user name. Default value is None. + :type created_by: str + :param stage: Specifies the featurestore stage. Default value is None. + :type stage: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FeaturestoreEntityVersion or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + skip=skip, + tags=tags, + list_view_type=list_view_type, + page_size=page_size, + version_name=version_name, + version=version, + description=description, + created_by=created_by, + stage=stage, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("FeaturestoreEntityVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + 
request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions" + } + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.FeaturestoreEntityVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FeaturestoreEntityVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturestoreEntityVersion, IO], + **kwargs: Any + ) -> _models.FeaturestoreEntityVersion: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturestoreEntityVersion") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + 
params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturestoreEntityVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either FeaturestoreEntityVersion or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturestoreEntityVersion or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturestoreEntityVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Is either a FeaturestoreEntityVersion type or + a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturestoreEntityVersion or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_inference_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_inference_endpoints_operations.py new file mode 100644 index 000000000000..ddf367d40f67 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_inference_endpoints_operations.py @@ -0,0 +1,1102 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + subscription_id: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "groupName": _SERIALIZER.url("group_name", group_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if properties is not None: + _params["properties"] = _SERIALIZER.query("properties", properties, "str") + if order_by is not None: + _params["orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + 
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "groupName": _SERIALIZER.url("group_name", group_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "groupName": _SERIALIZER.url("group_name", group_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + subscription_id: str, + *, + json: Any, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "groupName": _SERIALIZER.url("group_name", group_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, json=json, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": 
_SERIALIZER.url("pool_name", pool_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "groupName": _SERIALIZER.url("group_name", group_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class InferenceEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`inference_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any + ) -> Iterable["_models.InferenceEndpointMinimalTrackedResource"]: + """List Inference Endpoints. + + List Inference Endpoints. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: Name of the InferencePool. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param count: Number of InferenceEndpoint to be retrieved in a page of results. Default value + is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. + :type properties: str + :param order_by: The option to order the response. Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. 
+ :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either InferenceEndpointMinimalTrackedResource or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize( + "InferenceEndpointMinimalTrackedResourceArmPaginatedResult", pipeline_response + ) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints" + } + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, pool_name: str, group_name: str, name: 
str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, pool_name: str, group_name: str, name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete InferenceEndpoint (asynchronous). + + Delete InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, pool_name: str, group_name: str, name: str, **kwargs: Any + ) -> _models.InferenceEndpointMinimalTrackedResource: + """Get InferenceEndpoint. + + Get InferenceEndpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: InferenceEndpointMinimalTrackedResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResource] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: Any, + **kwargs: Any + ) -> Optional[_models.InferenceEndpointMinimalTrackedResource]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[Optional[_models.InferenceEndpointMinimalTrackedResource]] = kwargs.pop("cls", None) + + _json = self._serialize.body(body, "object") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: Any, + **kwargs: Any + ) -> LROPoller[_models.InferenceEndpointMinimalTrackedResource]: + """Update InferenceEndpoint (asynchronous). + + Update InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. + :type name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: any + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either InferenceEndpointMinimalTrackedResource + or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: Union[_models.InferenceEndpointMinimalTrackedResource, IO], + **kwargs: Any + ) -> _models.InferenceEndpointMinimalTrackedResource: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResource] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "InferenceEndpointMinimalTrackedResource") + + request = build_create_or_update_request( + 
resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: _models.InferenceEndpointMinimalTrackedResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferenceEndpointMinimalTrackedResource]: + """Create or update InferenceEndpoint (asynchronous). + + Create or update InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. + :type name: str + :param body: InferenceEndpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either InferenceEndpointMinimalTrackedResource + or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferenceEndpointMinimalTrackedResource]: + """Create or update InferenceEndpoint (asynchronous). + + Create or update InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. + :type name: str + :param body: InferenceEndpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either InferenceEndpointMinimalTrackedResource + or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + group_name: str, + name: str, + body: Union[_models.InferenceEndpointMinimalTrackedResource, IO], + **kwargs: Any + ) -> LROPoller[_models.InferenceEndpointMinimalTrackedResource]: + """Create or update InferenceEndpoint (asynchronous). + + Create or update InferenceEndpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param group_name: InferenceGroup name. Required. + :type group_name: str + :param name: InferenceEndpoint name. Required. 
+ :type name: str + :param body: InferenceEndpoint entity to apply during operation. Is either a + InferenceEndpointMinimalTrackedResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either InferenceEndpointMinimalTrackedResource + or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceEndpointMinimalTrackedResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferenceEndpointMinimalTrackedResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + group_name=group_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferenceEndpointMinimalTrackedResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{groupName}/endpoints/{name}" + } diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_inference_groups_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_inference_groups_operations.py new file mode 100644 index 000000000000..7c4451784d87 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_inference_groups_operations.py @@ -0,0 +1,1416 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + pool_name: str, + subscription_id: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if properties is not None: + _params["properties"] = _SERIALIZER.query("properties", properties, "str") + if order_by is not None: + _params["orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, pool_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", 
workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, pool_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, pool_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", 
pool_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, pool_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_skus_request( + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + subscription_id: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}/skus", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", 
workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_status_request( + resource_group_name: str, workspace_name: str, pool_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}/status", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "poolName": _SERIALIZER.url("pool_name", pool_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class InferenceGroupsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`inference_groups` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any + ) -> Iterable["_models.InferenceGroupMinimalTrackedResourceWithSku"]: + """List Inference Groups. + + List Inference Groups. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: Name of the InferencePool. Required. + :type pool_name: str + :param count: Number of InferenceGroup to be retrieved in a page of results. Default value is + None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. + :type properties: str + :param order_by: The option to order the response. Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. + :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either InferenceGroupMinimalTrackedResourceWithSku or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferenceGroupMinimalTrackedResourceWithSkuArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize( + "InferenceGroupMinimalTrackedResourceWithSkuArmPaginatedResult", pipeline_response + ) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, 
iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups" + } + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, pool_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, pool_name: str, name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete InferenceGroup (asynchronous). + + Delete InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
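As a usage sketch for the generated `list` operation above: callers reach it through the client's `inference_groups` attribute, as the class docstring notes. The subscription, resource group, workspace, and pool names below are hypothetical placeholders, and the sketch assumes the separate `azure-identity` package is installed for `DefaultAzureCredential`.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

# Hypothetical identifiers, used purely for illustration.
client = MachineLearningServicesMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# list() returns an ItemPaged iterator; paging is handled transparently via the
# next_link handling shown in the generated code above.
for group in client.inference_groups.list(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    pool_name="my-pool",
    order_by="CreatedAtDesc",  # one of the documented OrderString values
):
    print(group.name)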
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, pool_name: str, name: str, **kwargs: Any + ) -> _models.InferenceGroupMinimalTrackedResourceWithSku: + """Get InferenceGroup. + + Get InferenceGroup. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. 
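A corresponding sketch for the `begin_delete` long-running operation documented above, reusing the hypothetical `client` from the earlier listing sketch; `begin_delete` returns an `LROPoller`, and `result()` blocks until the deletion finishes.

# `client` is the MachineLearningServicesMgmtClient from the earlier sketch.
poller = client.inference_groups.begin_delete(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    pool_name="my-pool",
    name="my-group",  # hypothetical InferenceGroup name
)
poller.result()  # block until the service reports the delete as complete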
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferenceGroupMinimalTrackedResourceWithSku] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> Optional[_models.InferenceGroupMinimalTrackedResourceWithSku]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.InferenceGroupMinimalTrackedResourceWithSku]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSku") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + 
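Likewise for the `get` operation just shown, which returns a deserialized `InferenceGroupMinimalTrackedResourceWithSku` (hypothetical names again, same `client` as above):

group = client.inference_groups.get(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    pool_name="my-pool",
    name="my-group",
)
print(group.id, group.name)  # standard ARM tracked-resource fields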
content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Update InferenceGroup (asynchronous). + + Update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Update InferenceGroup (asynchronous). + + Update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> LROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Update InferenceGroup (asynchronous). + + Update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: Online Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSku type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferenceGroupMinimalTrackedResourceWithSku] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: Union[_models.InferenceGroupMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> _models.InferenceGroupMinimalTrackedResourceWithSku: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: 
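A hedged sketch of the `begin_update` poller above. It assumes the generated `PartialMinimalTrackedResourceWithSku` model exposes a `tags` keyword, which should be verified against the models module before use; everything else follows the signature documented above.

from azure.mgmt.machinelearningservices import models

update_body = models.PartialMinimalTrackedResourceWithSku(
    tags={"env": "test"},  # assumed attribute; confirm against the generated model
)
poller = client.inference_groups.begin_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    pool_name="my-pool",
    name="my-group",
    body=update_body,
)
updated = poller.result()  # deserialized InferenceGroupMinimalTrackedResourceWithSku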
ClsType[_models.InferenceGroupMinimalTrackedResourceWithSku] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "InferenceGroupMinimalTrackedResourceWithSku") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: _models.InferenceGroupMinimalTrackedResourceWithSku, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Create or update InferenceGroup (asynchronous). + + Create or update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: InferenceGroup entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Create or update InferenceGroup (asynchronous). + + Create or update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: InferenceGroup entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + pool_name: str, + name: str, + body: Union[_models.InferenceGroupMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> LROPoller[_models.InferenceGroupMinimalTrackedResourceWithSku]: + """Create or update InferenceGroup (asynchronous). + + Create or update InferenceGroup (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. 
Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :param body: InferenceGroup entity to apply during operation. Is either a + InferenceGroupMinimalTrackedResourceWithSku type or a IO type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either + InferenceGroupMinimalTrackedResourceWithSku or the result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferenceGroupMinimalTrackedResourceWithSku] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferenceGroupMinimalTrackedResourceWithSku] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferenceGroupMinimalTrackedResourceWithSku", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}" + } + + @distributed_trace + def list_skus( + self, + resource_group_name: str, + 
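For `begin_create_or_update`, a self-contained sketch that avoids guessing at the model's required fields by round-tripping an existing group fetched with `get`; a real create would instead build an `InferenceGroupMinimalTrackedResourceWithSku` from scratch.

# Fetch an existing group and resubmit it unchanged (effectively a no-op PUT).
existing = client.inference_groups.get(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    pool_name="my-pool",
    name="my-group",
)
poller = client.inference_groups.begin_create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    pool_name="my-pool",
    name="my-group",
    body=existing,
)
result = poller.result()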
workspace_name: str, + pool_name: str, + name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.SkuResource"]: + """List Inference Group Skus. + + List Inference Group Skus. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: Inference Pool name. Required. + :type pool_name: str + :param name: Inference Group name. Required. + :type name: str + :param count: Number of Skus to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.SkuResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SkuResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_skus_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + api_version=api_version, + template_url=self.list_skus.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + 
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_skus.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}/skus" + } + + @distributed_trace + def get_status( + self, resource_group_name: str, workspace_name: str, pool_name: str, name: str, **kwargs: Any + ) -> _models.GroupStatus: + """Retrieve inference group status. + + Retrieve inference group status. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param pool_name: InferencePool name. Required. + :type pool_name: str + :param name: InferenceGroup name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GroupStatus or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.GroupStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.GroupStatus] = kwargs.pop("cls", None) + + request = build_get_status_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + pool_name=pool_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_status.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("GroupStatus", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_status.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{poolName}/groups/{name}/status" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_inference_pools_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_inference_pools_operations.py new file mode 100644 index 000000000000..fa7f35a756a0 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_inference_pools_operations.py @@ -0,0 
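Rounding out the `InferenceGroupsOperations` surface defined above: `list_skus` pages through the SKUs available to a group, and `get_status` issues the POST documented for the `/status` route (same hypothetical names and `client` as the earlier sketches).

for sku in client.inference_groups.list_skus(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    pool_name="my-pool",
    name="my-group",
    count=10,  # optional page size, per the documented parameter
):
    print(sku)

status = client.inference_groups.get_status(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    pool_name="my-pool",
    name="my-group",
)
print(status)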
+1,1349 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if tags is not None: + _params["tags"] = _SERIALIZER.query("tags", tags, "str") + if properties is not None: + _params["properties"] = _SERIALIZER.query("properties", properties, "str") + if order_by is not None: + _params["orderBy"] = _SERIALIZER.query("order_by", order_by, "str") + + # Construct headers + 
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: 
Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_skus_request( + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept 
= _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}/skus", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if count is not None: + _params["count"] = _SERIALIZER.query("count", count, "int") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_status_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}/status", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class InferencePoolsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`inference_pools` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any + ) -> Iterable["_models.InferencePool"]: + """List InferencePools. + + List InferencePools. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param count: Number of inferencePools to be retrieved in a page of results. Default value is + None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. + :type properties: str + :param order_by: The option to order the response. Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. 
+ :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either InferencePool or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferencePoolTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("InferencePoolTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools" + } + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + 
error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> LROPoller[None]: + """Delete InferencePool (asynchronous). + + Delete InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.InferencePool: + """Get InferencePool. + + Get InferencePool. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: InferencePool or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.InferencePool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InferencePool] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("InferencePool", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> Optional[_models.InferencePool]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.InferencePool]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSkuAndIdentity") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = 
False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferencePool", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.PartialMinimalTrackedResourceWithSkuAndIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferencePool]: + """Update InferencePool (asynchronous). + + Update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: Inference Pool entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either InferencePool or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferencePool]: + """Update InferencePool (asynchronous). + + Update InferencePool (asynchronous). 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: Inference Pool entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either InferencePool or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> LROPoller[_models.InferencePool]: + """Update InferencePool (asynchronous). + + Update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: Inference Pool entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSkuAndIdentity type or a IO type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either InferencePool or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferencePool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferencePool", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.InferencePool, IO], + **kwargs: Any + ) -> _models.InferencePool: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferencePool] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "InferencePool") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("InferencePool", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("InferencePool", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.InferencePool, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferencePool]: + """Create or update InferencePool (asynchronous). + + Create or update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: InferencePool entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.InferencePool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either InferencePool or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InferencePool]: + """Create or update InferencePool (asynchronous). 
+ + Create or update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: InferencePool entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either InferencePool or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.InferencePool, IO], + **kwargs: Any + ) -> LROPoller[_models.InferencePool]: + """Create or update InferencePool (asynchronous). + + Create or update InferencePool (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :param body: InferencePool entity to apply during operation. Is either a InferencePool type or + a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.InferencePool or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either InferencePool or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.InferencePool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InferencePool] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("InferencePool", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}" + } + + @distributed_trace + def list_skus( + self, + resource_group_name: str, + workspace_name: str, + name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.SkuResource"]: + """List Inference Pool Skus. + + List Inference Pool Skus. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Inference Group name. Required. + :type name: str + :param count: Number of Skus to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.SkuResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SkuResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_skus_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + count=count, + skip=skip, + api_version=api_version, + template_url=self.list_skus.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_skus.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}/skus" + } + + @distributed_trace + def get_status(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.PoolStatus: + """Retrieve inference pool status. + + Retrieve inference pool status. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: InferencePool name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PoolStatus or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PoolStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PoolStatus] = kwargs.pop("cls", None) + + request = build_get_status_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get_status.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("PoolStatus", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_status.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/inferencePools/{name}/status" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py index 83dc12a1ff03..94b6f11b678c 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py @@ -30,7 +30,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -48,12 +48,16 @@ def build_list_request( job_type: Optional[str] = None, tag: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + asset_name: Optional[str] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + properties: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -71,7 +75,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -83,6 +87,14 @@ def build_list_request( _params["tag"] = _SERIALIZER.query("tag", tag, "str") if list_view_type is not None: _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if asset_name is not None: + _params["assetName"] = _SERIALIZER.query("asset_name", asset_name, "str") + if scheduled is not None: + _params["scheduled"] = _SERIALIZER.query("scheduled", scheduled, "bool") + if schedule_id is not None: + _params["scheduleId"] = _SERIALIZER.query("schedule_id", schedule_id, "str") + if properties is not None: + _params["properties"] = _SERIALIZER.query("properties", properties, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -96,7 +108,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -115,7 +127,7 @@ def build_delete_request( "id": _SERIALIZER.url("id", id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -132,7 +144,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -151,7 +163,7 @@ def build_get_request( "id": _SERIALIZER.url("id", id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ 
-162,13 +174,52 @@ def build_get_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) +def build_update_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + def build_create_or_update_request( resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -188,7 +239,7 @@ def build_create_or_update_request( "id": _SERIALIZER.url("id", id, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -207,7 +258,7 @@ def build_cancel_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -226,7 +277,7 @@ def build_cancel_request( "id": _SERIALIZER.url("id", id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") @@ -265,6 +316,10 @@ def list( job_type: Optional[str] = None, tag: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + asset_name: Optional[str] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + properties: Optional[str] = None, **kwargs: Any ) -> Iterable["_models.JobBase"]: """Lists Jobs in the workspace. @@ -285,6 +340,15 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param asset_name: Asset name the job's named output is registered with. Default value is None. + :type asset_name: str + :param scheduled: Indicator whether the job is scheduled job. Default value is None. + :type scheduled: bool + :param schedule_id: The scheduled id for listing the job triggered from. Default value is None. + :type schedule_id: str + :param properties: Comma-separated list of property names (and optionally values). Example: + prop1,prop2=value2. Default value is None. + :type properties: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobBase or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.JobBase] @@ -315,6 +379,10 @@ def prepare_request(next_link=None): job_type=job_type, tag=tag, list_view_type=list_view_type, + asset_name=asset_name, + scheduled=scheduled, + schedule_id=schedule_id, + properties=properties, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, @@ -566,6 +634,165 @@ def get(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" } + @overload + def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.PartialJobBasePartialResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.PartialJobBasePartialResource, IO], + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Is either a + PartialJobBasePartialResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.JobBase] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialJobBasePartialResource") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("JobBase", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } + @overload def create_or_update( self, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py new file mode 100644 index 000000000000..807fc976f9a4 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py @@ -0,0 +1,1280 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + top: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if top is not None: + _params["$top"] = _SERIALIZER.query("top", top, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + 
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_export_labels_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = 
_headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_pause_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_resume_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + 
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class LabelingJobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`labeling_jobs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + top: Optional[int] = None, + **kwargs: Any + ) -> Iterable["_models.LabelingJob"]: + """Lists labeling jobs in the workspace. + + Lists labeling jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param top: Number of labeling jobs to return. Default value is None. 
+ :type top: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LabelingJob or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + top=top, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("LabelingJobResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs" + } + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: + """Delete a labeling job. + + Delete a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> _models.LabelingJob: + """Gets a labeling job by name/id. + + Gets a labeling job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LabelingJob or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJob + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> _models.LabelingJob: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "LabelingJob") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("LabelingJob", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Is either a LabelingJob type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("LabelingJob", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + + def _export_labels_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> Optional[_models.ExportSummary]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ExportSummary]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ExportSummary") + + request = build_export_labels_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._export_labels_initial.metadata["url"], + 
headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ExportSummary", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _export_labels_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels" + } + + @overload + def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Is either a ExportSummary type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ExportSummary] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._export_labels_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ExportSummary", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_export_labels.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels" + } + + @distributed_trace + def pause( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> _models.LabelingJobProperties: + """Pause a labeling job. + + Pause a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LabelingJobProperties or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobProperties] = kwargs.pop("cls", None) + + request = build_pause_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.pause.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + pause.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause" + } + + def _resume_initial( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> Optional[_models.LabelingJobProperties]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Optional[_models.LabelingJobProperties]] = kwargs.pop("cls", None) + + request = build_resume_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._resume_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _resume_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume" + } + + @distributed_trace + def begin_resume( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> LROPoller[_models.LabelingJobProperties]: + """Resume a labeling job (asynchronous). + + Resume a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either LabelingJobProperties or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJobProperties] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobProperties] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._resume_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_resume.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_managed_network_provisions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_managed_network_provisions_operations.py new file mode 100644 index 000000000000..ff11e53b6e6a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_managed_network_provisions_operations.py @@ -0,0 +1,340 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_provision_managed_network_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class ManagedNetworkProvisionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`managed_network_provisions` attribute. 
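+
+    A minimal usage sketch (illustrative only; ``client`` stands for a
+    :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient` instance and
+    the resource group / workspace names are placeholders, not values defined here)::
+
+        poller = client.managed_network_provisions.begin_provision_managed_network(
+            resource_group_name="example-rg",        # placeholder name
+            workspace_name="example-workspace",      # placeholder name
+        )
+        status = poller.result()  # ManagedNetworkProvisionStatus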
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + def _provision_managed_network_initial( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[Union[_models.ManagedNetworkProvisionOptions, IO]] = None, + **kwargs: Any + ) -> Optional[_models.ManagedNetworkProvisionStatus]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ManagedNetworkProvisionStatus]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _json = self._serialize.body(body, "ManagedNetworkProvisionOptions") + else: + _json = None + + request = build_provision_managed_network_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._provision_managed_network_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ManagedNetworkProvisionStatus", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _provision_managed_network_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork" + } + + @overload + def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[_models.ManagedNetworkProvisionOptions] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> 
LROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. + + Provisions the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Default + value is None. + :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. + + Provisions the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Default + value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[Union[_models.ManagedNetworkProvisionOptions, IO]] = None, + **kwargs: Any + ) -> LROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. + + Provisions the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Is either a + ManagedNetworkProvisionOptions type or a IO type. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ManagedNetworkProvisionStatus] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._provision_managed_network_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ManagedNetworkProvisionStatus", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_provision_managed_network.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_managed_network_settings_rule_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_managed_network_settings_rule_operations.py new file mode 100644 index 000000000000..c901aaee6808 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_managed_network_settings_rule_operations.py @@ -0,0 +1,749 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
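A minimal usage sketch for the begin_provision_managed_network long-running operation generated above. The credential, subscription ID, and resource names are placeholders, and the operations-group attribute name (managed_network_provisions) and the status attribute are assumptions about the generated client rather than something this diff states:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

client = MachineLearningServicesMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

# Start provisioning the workspace managed network.
# body (ManagedNetworkProvisionOptions) is optional (default None) and omitted here.
poller = client.managed_network_provisions.begin_provision_managed_network(
    resource_group_name="<resource-group>",  # placeholder
    workspace_name="<workspace-name>",       # placeholder
)
provision_status = poller.result()  # blocks until the LRO completes
print(provision_status.status)  # attribute name assumed from ManagedNetworkProvisionStatus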
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, rule_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + 
"workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "ruleName": _SERIALIZER.url("rule_name", rule_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, rule_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "ruleName": _SERIALIZER.url("rule_name", rule_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, rule_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "ruleName": _SERIALIZER.url("rule_name", rule_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ManagedNetworkSettingsRuleOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`managed_network_settings_rule` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Iterable["_models.OutboundRuleBasicResource"]: + """Lists the managed network outbound rules for a machine learning workspace. + + Lists the managed network outbound rules for a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OutboundRuleBasicResource or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutboundRuleListResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def 
extract_data(pipeline_response): + deserialized = self._deserialize("OutboundRuleListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules" + } + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + @distributed_trace + def begin_delete( + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Deletes an outbound rule from the managed network of a machine learning workspace. + + Deletes an outbound rule from the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> _models.OutboundRuleBasicResource: + """Gets an outbound rule from the managed network of a machine learning workspace. + + Gets an outbound rule from the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. 
+ :type rule_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OutboundRuleBasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutboundRuleBasicResource] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: Union[_models.OutboundRuleBasicResource, IO], + **kwargs: Any + ) -> Optional[_models.OutboundRuleBasicResource]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.OutboundRuleBasicResource]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OutboundRuleBasicResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: _models.OutboundRuleBasicResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + + Creates or updates an outbound rule in the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either OutboundRuleBasicResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + + Creates or updates an outbound rule in the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OutboundRuleBasicResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: Union[_models.OutboundRuleBasicResource, IO], + **kwargs: Any + ) -> LROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + + Creates or updates an outbound rule in the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Is either a OutboundRuleBasicResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OutboundRuleBasicResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OutboundRuleBasicResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + rule_name=rule_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py index bdc20a437ab9..2c6636fb7a3a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py @@ -28,7 +28,7 @@ from .. 
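A hedged sketch of exercising the new ManagedNetworkSettingsRuleOperations group documented above: create or update an outbound rule, then list the workspace's outbound rules. Resource names are placeholders, and the FqdnOutboundRule model and its destination field are assumed outbound-rule types rather than something shown in this diff:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient
from azure.mgmt.machinelearningservices.models import FqdnOutboundRule, OutboundRuleBasicResource

client = MachineLearningServicesMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

# Create (or update) an FQDN outbound rule on the workspace managed network.
rule = OutboundRuleBasicResource(properties=FqdnOutboundRule(destination="pypi.org"))
poller = client.managed_network_settings_rule.begin_create_or_update(
    resource_group_name="<resource-group>",  # placeholder
    workspace_name="<workspace-name>",       # placeholder
    rule_name="allow-pypi",
    body=rule,
)
created_rule = poller.result()  # blocks until the create/update LRO completes

# Enumerate all outbound rules configured for the workspace.
for outbound_rule in client.managed_network_settings_rule.list(
    resource_group_name="<resource-group>", workspace_name="<workspace-name>"
):
    print(outbound_rule.name)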
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -50,7 +50,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -68,7 +68,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -91,7 +91,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -110,7 +110,7 @@ def build_delete_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -127,7 +127,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -146,7 +146,7 @@ def build_get_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -163,7 +163,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -183,7 +183,7 @@ def build_create_or_update_request( "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py index 94082df98347..6cfe3217a9e8 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -21,14 +21,16 @@ from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -53,12 +55,13 @@ def build_list_request( properties: Optional[str] = None, feed: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -77,7 +80,7 @@ def build_list_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -101,6 +104,8 @@ def build_list_request( _params["feed"] = _SERIALIZER.query("feed", feed, "str") if list_view_type is not None: _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if stage is not None: + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -114,7 +119,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", 
"application/json") # Construct URL @@ -134,7 +139,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -151,7 +156,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -171,7 +176,7 @@ def build_get_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -188,7 +193,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -209,7 +214,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -222,6 +227,46 @@ def build_create_or_update_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) +def build_package_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + class ModelVersionsOperations: """ .. warning:: @@ -257,6 +302,7 @@ def list( properties: Optional[str] = None, feed: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> Iterable["_models.ModelVersion"]: """List model versions. @@ -293,6 +339,8 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: Model stage. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ModelVersion or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersion] @@ -330,6 +378,7 @@ def prepare_request(next_link=None): properties=properties, feed=feed, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, @@ -699,3 +748,267 @@ def create_or_update( create_or_update.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}" } + + def _package_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> Optional[_models.PackageResponse]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.PackageResponse]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PackageRequest") + + request = build_package_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._package_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + 
response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("PackageResponse", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _package_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package" + } + + @overload + def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.PackageRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. 
This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Is either a PackageRequest type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PackageResponse] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._package_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("PackageResponse", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_package.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py index 618d15662d98..695e86a22800 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py @@ -30,7 +30,7 @@ from .. 
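A brief sketch of the model-version additions in this API version: list now accepts a stage filter, and begin_package starts a packaging LRO (its PackageRequest body is workload-specific and not constructed here). The model_versions attribute name, the resource names, and the stage value are placeholders or assumptions:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

client = MachineLearningServicesMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

# List versions of a registered model, filtered by the new `stage` query parameter.
for model_version in client.model_versions.list(
    resource_group_name="<resource-group>",  # placeholder
    workspace_name="<workspace-name>",       # placeholder
    name="<model-name>",                     # placeholder
    stage="Production",                      # example stage value
):
    print(model_version.name)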
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -53,7 +53,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,7 +72,7 @@ def build_list_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -100,7 +100,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -120,7 +120,7 @@ def build_delete_request( "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -142,7 +142,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -162,7 +162,7 @@ def build_get_request( "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -184,7 +184,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -209,7 +209,7 @@ def build_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -233,7 +233,7 @@ def build_create_or_update_request( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -258,7 +258,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -282,7 +282,7 @@ def build_get_logs_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -303,7 +303,7 @@ def build_get_logs_request( "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -330,7 +330,7 @@ def build_list_skus_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -350,7 +350,7 @@ def build_list_skus_request( "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py index 6fbb16092160..b76f06904725 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py @@ -30,7 +30,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -56,7 +56,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -74,7 +74,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -105,7 +105,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -124,7 +124,7 @@ def build_delete_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -141,7 +141,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -160,7 +160,7 @@ def build_get_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -177,7 +177,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -197,7 +197,7 @@ def build_update_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -216,7 +216,7 @@ def build_create_or_update_request( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -238,7 +238,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -257,7 +257,7 @@ def build_list_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -276,7 +276,7 @@ def build_list_keys_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -293,7 +293,7 @@ def build_regenerate_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -313,7 +313,7 @@ def build_regenerate_keys_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -332,7 +332,7 @@ def build_get_token_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -351,7 +351,7 @@ def build_get_token_request( "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py index 7dbac1fe15f1..c3beda87a8d1 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py @@ -40,7 +40,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -78,6 +78,8 @@ def __init__(self, *args, **kwargs): def list(self, **kwargs: Any) -> Iterable["_models.AmlOperation"]: """Lists all of the available Azure Machine Learning Workspaces REST API operations. + Lists all of the available Azure Machine Learning Workspaces REST API operations. + :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AmlOperation or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlOperation] diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py index a5d73b4be943..c7beece3543f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py @@ -28,7 +28,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -43,7 +43,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -52,16 +52,16 @@ def build_list_request( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections", ) # pylint: disable=line-too-long path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 ), "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -72,7 +72,7 @@ def build_list_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_get_request( +def build_delete_request( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, @@ -82,7 +82,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -103,7 +103,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -111,10 +111,10 @@ def build_get_request( # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_create_or_update_request( +def build_get_request( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, @@ -124,8 +124,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -146,20 +145,18 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_delete_request( +def build_create_or_update_request( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, @@ -169,7 +166,8 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -190,15 +188,17 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) class PrivateEndpointConnectionsOperations: @@ -224,7 +224,9 @@ def __init__(self, *args, **kwargs): def list( self, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> Iterable["_models.PrivateEndpointConnection"]: - """List all the private endpoint connections associated with the workspace. + """Called by end-users to get all PE connections. + + Called by end-users to get all PE connections. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -314,19 +316,86 @@ def get_next(next_link=None): "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections" } + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> None: + """Called by end-users to delete a PE connection. + + Called by end-users to delete a PE connection. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + } + @distributed_trace def get( self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any ) -> _models.PrivateEndpointConnection: - """Gets the specified private endpoint connection associated with the workspace. + """Called by end-users to get a PE connection. + + Called by end-users to get a PE connection. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpointConnection or the result of cls(response) @@ -389,23 +458,26 @@ def create_or_update( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - properties: _models.PrivateEndpointConnection, + body: _models.PrivateEndpointConnection, *, content_type: str = "application/json", **kwargs: Any ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the workspace. + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. 
+ + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Required. - :type properties: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :param body: PrivateEndpointConnection object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -421,23 +493,26 @@ def create_or_update( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - properties: IO, + body: IO, *, content_type: str = "application/json", **kwargs: Any ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the workspace. + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Required. - :type properties: IO + :param body: PrivateEndpointConnection object. Required. + :type body: IO :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -453,22 +528,25 @@ def create_or_update( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - properties: Union[_models.PrivateEndpointConnection, IO], + body: Union[_models.PrivateEndpointConnection, IO], **kwargs: Any ) -> _models.PrivateEndpointConnection: - """Update the state of specified private endpoint connection associated with the workspace. + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. 
+ :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str - :param properties: The private endpoint connection properties. Is either a - PrivateEndpointConnection type or a IO type. Required. - :type properties: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection or IO + :param body: PrivateEndpointConnection object. Is either a PrivateEndpointConnection type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. :paramtype content_type: str @@ -495,10 +573,10 @@ def create_or_update( content_type = content_type or "application/json" _json = None _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = self._serialize.body(properties, "PrivateEndpointConnection") + _json = self._serialize.body(body, "PrivateEndpointConnection") request = build_create_or_update_request( resource_group_name=resource_group_name, @@ -538,68 +616,3 @@ def create_or_update( create_or_update.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" } - - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any - ) -> None: - """Deletes the specified private endpoint connection associated with the workspace. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection associated - with the workspace. Required. 
- :type private_endpoint_connection_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - - request = build_delete_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - private_endpoint_connection_name=private_endpoint_connection_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.delete.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" - } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py index a262b6f295cc..45dd61474ad9 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py @@ -6,7 +6,8 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -16,6 +17,7 @@ ResourceNotModifiedError, map_error, ) +from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest @@ -25,7 +27,7 @@ from .. 
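With this reordering, delete on PrivateEndpointConnections remains a plain synchronous call (200/204, no poller), and create_or_update now takes its payload as body rather than properties. A minimal sketch, assuming client is the MachineLearningServicesMgmtClient from the previous example and the resource names are placeholders:

# Enumerate the workspace's private endpoint connections.
for connection in client.private_endpoint_connections.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
):
    print(connection.name)

# delete is synchronous; it returns None rather than an LROPoller.
client.private_endpoint_connections.delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    private_endpoint_connection_name="<pe-connection-name>",
)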
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -40,7 +42,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -58,7 +60,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -91,8 +93,22 @@ def __init__(self, *args, **kwargs): @distributed_trace def list( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.PrivateLinkResourceListResult: - """Gets the private link resources that need to be created for a workspace. + ) -> Iterable["_models.PrivateLinkResource"]: + """Called by Client (Portal, CLI, etc) to get available "private link resources" for the + workspace. + Each "private link resource" is a connection endpoint (IP address) to the resource. + Pre single connection endpoint per workspace: the Data Plane IP address, returned by DNS + resolution. + Other RPs, such as Azure Storage, have multiple - one for Blobs, other for Queues, etc. + Defined in the "[NRP] Private Endpoint Design" doc, topic "GET API for GroupIds". + + Called by Client (Portal, CLI, etc) to get available "private link resources" for the + workspace. + Each "private link resource" is a connection endpoint (IP address) to the resource. + Pre single connection endpoint per workspace: the Data Plane IP address, returned by DNS + resolution. + Other RPs, such as Azure Storage, have multiple - one for Blobs, other for Queues, etc. + Defined in the "[NRP] Private Endpoint Design" doc, topic "GET API for GroupIds". :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -100,10 +116,17 @@ def list( :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateLinkResourceListResult or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.PrivateLinkResourceListResult + :return: An iterator like instance of either PrivateLinkResource or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None) + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -112,42 +135,63 @@ def list( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None) - - request = build_list_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + 
list_of_elem = cls(list_of_elem) # type: ignore + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) list.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources" diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py index 1896a7522c0b..3733da6db14c 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py @@ -28,7 +28,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -41,7 +41,7 @@ def build_update_request(location: str, subscription_id: str, **kwargs: Any) -> _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -55,7 +55,7 @@ def build_update_request(location: str, subscription_id: str, **kwargs: Any) -> "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -72,7 +72,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -85,7 +85,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = 
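The list operation on PrivateLinkResources now returns a pager of PrivateLinkResource items instead of a single PrivateLinkResourceListResult, so callers iterate the result rather than read its value attribute. A minimal sketch, again assuming the client and placeholder names from the examples above:

for resource in client.private_link_resources.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
):
    print(resource.name)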
_url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registries_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registries_operations.py index 089d9208e62c..82e194f3d2ef 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registries_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registries_operations.py @@ -30,7 +30,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -43,7 +43,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -54,7 +54,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -69,7 +69,7 @@ def build_list_request(resource_group_name: str, subscription_id: str, **kwargs: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -84,7 +84,7 @@ def build_list_request(resource_group_name: str, subscription_id: str, **kwargs: ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -101,7 +101,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -119,7 +119,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -134,7 +134,7 @@ def 
build_get_request(resource_group_name: str, registry_name: str, subscription _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -152,7 +152,7 @@ def build_get_request(resource_group_name: str, registry_name: str, subscription ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -169,7 +169,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -188,7 +188,7 @@ def build_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -207,7 +207,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -226,7 +226,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -245,7 +245,7 @@ def build_remove_regions_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -264,7 +264,7 @@ def build_remove_regions_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_containers_operations.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_containers_operations.py index c925671f74d2..e6aa97f4d05f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_containers_operations.py @@ -30,7 +30,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +45,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -63,7 +63,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -82,7 +82,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -101,7 +101,7 @@ def build_delete_request( "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -118,7 +118,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -137,7 +137,7 @@ def build_get_request( "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -154,7 +154,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: 
Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -174,7 +174,7 @@ def build_create_or_update_request( "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_versions_operations.py index 431682513a37..04ef7228d53d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_code_versions_operations.py @@ -30,7 +30,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -53,7 +53,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,7 +72,7 @@ def build_list_request( "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -95,7 +95,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -115,7 +115,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -132,7 +132,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -152,7 +152,7 @@ def build_get_request( 
"version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -169,7 +169,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -190,7 +190,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -209,7 +209,7 @@ def build_create_or_get_start_pending_upload_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -230,7 +230,7 @@ def build_create_or_get_start_pending_upload_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_containers_operations.py index 8fa9042f77a3..bf2c3205a2ae 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_containers_operations.py @@ -30,7 +30,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +45,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -63,7 +63,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -82,7 +82,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -103,7 +103,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -120,7 +120,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -141,7 +141,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -158,7 +158,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -180,7 +180,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_versions_operations.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_versions_operations.py index 7bb69cf38e1a..f525b3ad42df 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_component_versions_operations.py @@ -30,7 +30,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -48,12 +48,13 @@ def build_list_request( order_by: Optional[str] = None, top: Optional[int] = None, skip: Optional[str] = None, + stage: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -74,7 +75,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -84,6 +85,8 @@ def build_list_request( _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if stage is not None: + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -97,7 +100,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -119,7 +122,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -136,7 +139,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -158,7 +161,7 @@ def build_get_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") @@ -175,7 +178,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -198,7 +201,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -239,6 +242,7 @@ def list( order_by: Optional[str] = None, top: Optional[int] = None, skip: Optional[str] = None, + stage: Optional[str] = None, **kwargs: Any ) -> Iterable["_models.ComponentVersion"]: """List versions. @@ -259,6 +263,8 @@ def list( :type top: int :param skip: Continuation token for pagination. Default value is None. :type skip: str + :param stage: Component stage. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ComponentVersion or the result of cls(response) :rtype: @@ -290,6 +296,7 @@ def prepare_request(next_link=None): order_by=order_by, top=top, skip=skip, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_data_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_data_containers_operations.py index a5a3f85fe645..a791e8aaef71 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_data_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_data_containers_operations.py @@ -30,7 +30,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +51,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +69,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -90,7 +90,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -109,7 +109,7 @@ def build_delete_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -126,7 +126,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -145,7 +145,7 @@ def build_get_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -162,7 +162,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -182,7 +182,7 @@ def build_create_or_update_request( "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_data_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_data_versions_operations.py index 7fda48b37788..9561ed06ca35 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_data_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_data_versions_operations.py @@ -30,7 +30,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -55,7 +55,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -74,7 +74,7 @@ def build_list_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -101,7 +101,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -121,7 +121,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -138,7 +138,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -158,7 +158,7 @@ def build_get_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -175,7 +175,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -196,7 +196,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -215,7 +215,7 @@ def build_create_or_get_start_pending_upload_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -236,7 +236,7 @@ def build_create_or_get_start_pending_upload_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_containers_operations.py index 3a3ff9edbad1..9b9c2aeb1cfc 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_containers_operations.py @@ -30,7 +30,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +51,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +69,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -90,7 +90,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -111,7 +111,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -128,7 +128,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -149,7 +149,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -166,7 +166,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -188,7 +188,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_versions_operations.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_versions_operations.py index 727a7a9602ce..777526c27cf9 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_environment_versions_operations.py @@ -30,7 +30,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -49,12 +49,13 @@ def build_list_request( top: Optional[int] = None, skip: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -75,7 +76,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -87,6 +88,8 @@ def build_list_request( _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") + if stage is not None: + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -105,7 +108,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -127,7 +130,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -149,7 +152,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -171,7 +174,7 @@ def build_get_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # 
Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -193,7 +196,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -216,7 +219,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -258,6 +261,7 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, **kwargs: Any ) -> Iterable["_models.EnvironmentVersion"]: """List versions. @@ -281,6 +285,9 @@ def list( :param list_view_type: View type for including/excluding (for example) archived entities. Known values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param stage: Stage for including/excluding (for example) archived entities. Takes priority + over listViewType. Default value is None. + :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) :rtype: @@ -313,6 +320,7 @@ def prepare_request(next_link=None): top=top, skip=skip, list_view_type=list_view_type, + stage=stage, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_containers_operations.py index 8c44417d63e7..5a88e6196090 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_containers_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_containers_operations.py @@ -30,7 +30,7 @@ from .. 
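Editorial note: registry environment versions gain the same stage filter, and the docstring above states that stage takes priority over listViewType. A minimal sketch combining the two; the operation-group attribute name, the environment_name parameter, and the stage value are assumptions for illustration.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")

# "registry_environment_versions", "environment_name" and "Production" are illustrative
# assumptions; per the docstring, stage takes priority over list_view_type.
for environment_version in client.registry_environment_versions.list(
    resource_group_name="<resource-group>",
    registry_name="<registry-name>",
    environment_name="<environment-name>",
    list_view_type="ActiveOnly",
    stage="Production",
):
    print(environment_version.name)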
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +51,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +69,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -90,7 +90,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -109,7 +109,7 @@ def build_delete_request( "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -126,7 +126,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -145,7 +145,7 @@ def build_get_request( "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -162,7 +162,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -182,7 +182,7 @@ def build_create_or_update_request( "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_versions_operations.py index d883c93e7433..f583f64f8d4b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_versions_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_registry_model_versions_operations.py @@ -30,7 +30,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -58,7 +58,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -77,7 +77,7 @@ def build_list_request( "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -110,7 +110,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -130,7 +130,7 @@ def build_delete_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -147,7 +147,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -167,7 +167,7 @@ def build_get_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -184,7 +184,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or 
{}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -205,7 +205,7 @@ def build_create_or_update_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -218,13 +218,53 @@ def build_create_or_update_request( return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) +def build_package_request( + resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + def build_create_or_get_start_pending_upload_request( resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -245,7 +285,7 @@ def build_create_or_get_start_pending_upload_request( "version": _SERIALIZER.url("version", version, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + 
_url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -892,6 +932,273 @@ def get_long_running_output(pipeline_response): "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" } + def _package_initial( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> Optional[_models.PackageResponse]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.PackageResponse]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PackageRequest") + + request = build_package_request( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._package_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("PackageResponse", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _package_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package" + } + + @overload + def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: _models.PackageRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Is either a PackageRequest type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PackageResponse] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._package_initial( + resource_group_name=resource_group_name, + registry_name=registry_name, + model_name=model_name, + version=version, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("PackageResponse", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_package.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package" + } + @overload def create_or_get_start_pending_upload( self, diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py index 2e25ec11855a..a0eeed503f41 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py @@ -30,7 +30,7 @@ from .. 
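Editorial note: begin_package is a new long-running operation that POSTs to the .../versions/{version}/package route and polls via the Location header. A minimal sketch of invoking it; the operation-group attribute name, the PackageRequest field names, and AzureMLOnlineInferencingServer are assumptions, so consult the generated models for the exact required shape.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient, models

client = MachineLearningServicesMgmtClient(DefaultAzureCredential(), "<subscription-id>")

# PackageRequest field names and AzureMLOnlineInferencingServer are assumptions for
# illustration only; check the generated PackageRequest model before relying on them.
package_request = models.PackageRequest(
    target_environment_id="<environment-arm-id>",
    inferencing_server=models.AzureMLOnlineInferencingServer(),
)

poller = client.registry_model_versions.begin_package(
    resource_group_name="<resource-group>",
    registry_name="<registry-name>",
    model_name="<model-name>",
    version="1",
    body=package_request,
)
package_response = poller.result()  # blocks until the location-polled LRO completes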
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +51,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -69,7 +69,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -90,7 +90,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -109,7 +109,7 @@ def build_delete_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -126,7 +126,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -145,7 +145,7 @@ def build_get_request( "name": _SERIALIZER.url("name", name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -162,7 +162,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -182,7 +182,7 @@ def build_create_or_update_request( "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_serverless_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_serverless_endpoints_operations.py new file mode 100644 index 000000000000..579901847783 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_serverless_endpoints_operations.py @@ -0,0 +1,1452 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, workspace_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + 
accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_keys_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/listKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_regenerate_keys_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class ServerlessEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`serverless_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.ServerlessEndpoint"]: + """List Serverless Endpoints. + + List Serverless Endpoints. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ServerlessEndpoint or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ServerlessEndpointTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + skip=skip, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ServerlessEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints" + } + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> LROPoller[None]: + """Delete Serverless Endpoint (asynchronous). + + Delete Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ServerlessEndpoint: + """Get Serverless Endpoint. + + Get Serverless Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. 
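Continuing the same sketch with the same placeholder names, the point-read and delete paths look like this; get's docstring continues below, and begin_delete returns an LROPoller[None] as documented above.

# Point read of a single serverless endpoint.
endpoint = client.serverless_endpoints.get(
    resource_group_name="my-rg", workspace_name="my-workspace", name="my-endpoint"
)

# Delete is a long-running operation: block on the poller to wait for completion.
delete_poller = client.serverless_endpoints.begin_delete(
    resource_group_name="my-rg", workspace_name="my-workspace", name="my-endpoint"
)
delete_poller.result()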
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ServerlessEndpoint or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> Optional[_models.ServerlessEndpoint]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ServerlessEndpoint]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSkuAndIdentity") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + + if response.status_code == 202: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.PartialMinimalTrackedResourceWithSkuAndIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSkuAndIdentity type or a IO type. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
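A hedged sketch of the PATCH path, reusing the client and placeholder names from the earlier sketch. The overloads above accept either the partial-update model or a JSON stream; the exact fields of PartialMinimalTrackedResourceWithSkuAndIdentity are defined in the models package rather than in this file, so the tags field used here is an assumption for illustration only.

from azure.mgmt.machinelearningservices import models

update_poller = client.serverless_endpoints.begin_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="my-endpoint",
    # `tags` is assumed to exist on the partial-update model; adjust to the real schema.
    body=models.PartialMinimalTrackedResourceWithSkuAndIdentity(tags={"env": "dev"}),
)
updated = update_poller.result()  # ServerlessEndpoint on success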
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ServerlessEndpoint, IO], + **kwargs: Any + ) -> _models.ServerlessEndpoint: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ServerlessEndpoint") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + 
subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + + if response.status_code == 201: + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ServerlessEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). + + Create or update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). + + Create or update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ServerlessEndpoint, IO], + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). + + Create or update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Is either a + ServerlessEndpoint type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
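A sketch of the PUT path using the IO overload shown above, which sidesteps constructing a ServerlessEndpoint model here (its required fields live in the models package, not in this file). The endpoint.json path and its payload are illustrative assumptions; client and resource names are the same placeholders as earlier.

# endpoint.json would hold the full ServerlessEndpoint payload (location,
# properties, and so on, per the models package schema).
with open("endpoint.json", "rb") as payload:
    create_poller = client.serverless_endpoints.begin_create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        name="my-endpoint",
        body=payload,
        content_type="application/json",
    )
created = create_poller.result()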
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @distributed_trace + def list_keys( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: + """List EndpointAuthKeys for an Endpoint using Key-based authentication. + + List EndpointAuthKeys for an Endpoint using Key-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. 
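For key-based endpoints, listing credentials is a plain synchronous call (list_keys' docstring continues below); this continues the earlier sketch with the same placeholder names.

keys = client.serverless_endpoints.list_keys(
    resource_group_name="my-rg", workspace_name="my-workspace", name="my-endpoint"
)
# `keys` is an EndpointAuthKeys model; its individual key fields are defined in
# the models package and are not repeated here.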
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) + + request = build_list_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/listKeys" + } + + def _regenerate_keys_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> Optional[_models.EndpointAuthKeys]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.EndpointAuthKeys]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "RegenerateEndpointKeysRequest") + + request = build_regenerate_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._regenerate_keys_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _regenerate_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys" + } + + @overload + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.RegenerateEndpointKeysRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: RegenerateKeys request . Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). 
+ + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: RegenerateKeys request . Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: RegenerateKeys request . Is either a RegenerateEndpointKeysRequest type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
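Key rotation is exposed as a long-running POST. A hedged sketch follows, continuing the earlier placeholder setup; the key_type value on RegenerateEndpointKeysRequest is assumed for illustration, since that model's fields are defined in the models package rather than in this diff.

from azure.mgmt.machinelearningservices import models

regen_poller = client.serverless_endpoints.begin_regenerate_keys(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="my-endpoint",
    # key_type="Primary" is an assumed field/value; consult the model definition.
    body=models.RegenerateEndpointKeysRequest(key_type="Primary"),
)
new_keys = regen_poller.result()  # EndpointAuthKeys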
+ :return: An instance of LROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._regenerate_keys_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_regenerate_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys" + } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py index 3dc29c049f1d..d72d8db8a24b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py @@ -27,7 +27,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -40,7 +40,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -53,7 +53,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py index 27bb7d3b448a..2a0973e82c71 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py @@ -25,7 +25,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -38,7 +38,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -51,7 +51,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py index 28c7857fccfb..728471b3ca5b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py @@ -28,7 +28,7 @@ from .. 
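Both request builders above now default to api-version 2023-08-01-preview. A caller that needs to stay on the previous stable contract can still pin the version when constructing the client; a minimal sketch:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

# Pinning api_version keeps every request on the 2023-04-01 contract instead of the new preview default.
client = MachineLearningServicesMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    api_version="2023-04-01",
)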
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -37,14 +37,58 @@ _SERIALIZER.client_side_validation = False -def build_create_request( +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if target is not None: + _params["target"] = _SERIALIZER.query("target", target, "str") + if category is not None: + _params["category"] = _SERIALIZER.query("category", category, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -60,20 +104,20 @@ def build_create_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "connectionName": _SERIALIZER.url("connection_name", connection_name, "str"), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, 
params=_params, headers=_headers, **kwargs) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( @@ -82,7 +126,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -98,10 +142,12 @@ def build_get_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "connectionName": _SERIALIZER.url("connection_name", connection_name, "str"), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -112,13 +158,14 @@ def build_get_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_delete_request( +def build_update_request( resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -134,39 +181,38 @@ def build_delete_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), - "connectionName": _SERIALIZER.url("connection_name", connection_name, "str"), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_list_request( - resource_group_name: str, - workspace_name: str, - subscription_id: str, - *, - target: Optional[str] = None, - category: Optional[str] = None, - **kwargs: Any +def build_create_request( + resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), @@ -176,54 +222,340 @@ def build_list_request( "workspaceName": _SERIALIZER.url( "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" ), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - if target is not None: - _params["target"] = _SERIALIZER.query("target", target, "str") - if category is not None: - _params["category"] = _SERIALIZER.query("category", category, "str") _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_secrets_request( + resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/listsecrets", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class 
WorkspaceConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s + :attr:`workspace_connections` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.WorkspaceConnectionPropertiesV2BasicResource"]: + """Lists all the available machine learning workspaces connections under the specified workspace. + + Lists all the available machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param target: Target of the workspace connection. Default value is None. + :type target: str + :param category: Category of the workspace connection. Default value is None. + :type category: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceConnectionPropertiesV2BasicResource or + the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + target=target, + category=category, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize( + "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response + ) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections" + } + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + ) -> None: + """Delete machine learning workspaces connections by name. + + Delete machine learning workspaces connections by name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. 
+ :type connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } + + @distributed_trace + def get( + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Lists machine learning workspaces connections by name. + + Lists machine learning workspaces connections by name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. 
+ :type connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) -class WorkspaceConnectionsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) - Instead, you should access the following operations through - :class:`~azure.mgmt.machinelearningservices.MachineLearningServicesMgmtClient`'s - :attr:`workspace_connections` attribute. - """ + if cls: + return cls(pipeline_response, deserialized, {}) - models = _models + return deserialized - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } @overload - def create( + def update( self, resource_group_name: str, workspace_name: str, connection_name: str, - parameters: _models.WorkspaceConnectionPropertiesV2BasicResource, + body: Optional[_models.WorkspaceConnectionUpdateParameter] = None, *, content_type: str = "application/json", **kwargs: Any ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: - """create. + """Update machine learning workspaces connections under the specified workspace. + + Update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
@@ -232,9 +564,8 @@ def create( :type workspace_name: str :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. Required. - :type parameters: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :param body: Parameters for workspace connection update. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -245,17 +576,19 @@ def create( """ @overload - def create( + def update( self, resource_group_name: str, workspace_name: str, connection_name: str, - parameters: IO, + body: Optional[IO] = None, *, content_type: str = "application/json", **kwargs: Any ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: - """create. + """Update machine learning workspaces connections under the specified workspace. + + Update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -264,8 +597,8 @@ def create( :type workspace_name: str :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. Required. - :type parameters: IO + :param body: Parameters for workspace connection update. Default value is None. + :type body: IO :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -276,15 +609,17 @@ def create( """ @distributed_trace - def create( + def update( self, resource_group_name: str, workspace_name: str, connection_name: str, - parameters: Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO], + body: Optional[Union[_models.WorkspaceConnectionUpdateParameter, IO]] = None, **kwargs: Any ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: - """create. + """Update machine learning workspaces connections under the specified workspace. + + Update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -293,10 +628,9 @@ def create( :type workspace_name: str :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. Is either a - WorkspaceConnectionPropertiesV2BasicResource type or a IO type. Required. - :type parameters: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :param body: Parameters for workspace connection update. Is either a + WorkspaceConnectionUpdateParameter type or a IO type. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. 
:paramtype content_type: str @@ -323,12 +657,15 @@ def create( content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = self._serialize.body(parameters, "WorkspaceConnectionPropertiesV2BasicResource") + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionUpdateParameter") + else: + _json = None - request = build_create_request( + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, @@ -337,7 +674,7 @@ def create( content_type=content_type, json=_json, content=_content, - template_url=self.create.metadata["url"], + template_url=self.update.metadata["url"], headers=_headers, params=_params, ) @@ -363,15 +700,91 @@ def create( return deserialized - create.metadata = { + update.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" } + @overload + def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[_models.WorkspaceConnectionPropertiesV2BasicResource] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Create or update machine learning workspaces connections under the specified workspace. + + Create or update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Default value is + None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Create or update machine learning workspaces connections under the specified workspace. + + Create or update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. 
+ :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Default value is + None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + @distributed_trace - def get( - self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO]] = None, + **kwargs: Any ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: - """get. + """Create or update machine learning workspaces connections under the specified workspace. + + Create or update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -380,6 +793,13 @@ def get( :type workspace_name: str :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Is either a + WorkspaceConnectionPropertiesV2BasicResource type or a IO type. Default value is None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource @@ -393,19 +813,34 @@ def get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) - request = build_get_request( + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionPropertiesV2BasicResource") + else: + _json = None + + request = build_create_request( resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], + content_type=content_type, + json=_json, + content=_content, + template_url=self.create.metadata["url"], headers=_headers, params=_params, ) @@ -431,15 +866,17 @@ def get( return deserialized - get.metadata = { + create.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" } @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements + def list_secrets( self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any - ) -> None: - """delete. + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """List all the secrets of a machine learning workspaces connections. + + List all the secrets of a machine learning workspaces connections. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -449,8 +886,8 @@ def delete( # pylint: disable=inconsistent-return-statements :param connection_name: Friendly name of the workspace connection. Required. 
:type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None or the result of cls(response) - :rtype: None + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map = { @@ -465,15 +902,15 @@ def delete( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) - request = build_delete_request( + request = build_list_secrets_request( resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], + template_url=self.list_secrets.metadata["url"], headers=_headers, params=_params, ) @@ -487,121 +924,18 @@ def delete( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" - } - - @distributed_trace - def list( - self, - resource_group_name: str, - workspace_name: str, - target: Optional[str] = None, - category: Optional[str] = None, - **kwargs: Any - ) -> Iterable["_models.WorkspaceConnectionPropertiesV2BasicResource"]: - """list. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :param target: Target of the workspace connection. Default value is None. - :type target: str - :param category: Category of the workspace connection. Default value is None. 
- :type category: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceConnectionPropertiesV2BasicResource or - the result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] = kwargs.pop("cls", None) - - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - target=target, - category=category, - api_version=api_version, - template_url=self.list.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize( - "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response - ) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) - return pipeline_response + if cls: + return cls(pipeline_response, deserialized, {}) - return ItemPaged(get_next, extract_data) + return deserialized - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections" + list_secrets.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/listsecrets" } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py index a8b05127e633..32a640d0d103 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py @@ -27,7 +27,7 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -42,7 +42,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -60,7 +60,7 @@ def build_list_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py index 3531e4cd54cc..2338af3a5aae 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py @@ -30,7 +30,7 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -39,34 +39,31 @@ _SERIALIZER.client_side_validation = False -def build_get_request( - resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +def build_list_by_subscription_request( + subscription_id: str, *, skip: Optional[str] = None, kind: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 - ), - "workspaceName": _SERIALIZER.url( - "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" - ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if kind is not None: + _params["kind"] = _SERIALIZER.query("kind", kind, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -74,51 +71,54 @@ def build_get_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_create_or_update_request( - resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +def build_list_by_resource_group_request( + resource_group_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + kind: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", 
min_length=1), "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 ), - "workspaceName": _SERIALIZER.url( - "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" - ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") + if kind is not None: + _params["kind"] = _SERIALIZER.query("kind", kind, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any + resource_group_name: str, workspace_name: str, subscription_id: str, *, force_to_purge: bool = False, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -136,10 +136,12 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if force_to_purge is not None: + _params["forceToPurge"] = _SERIALIZER.query("force_to_purge", force_to_purge, "bool") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -147,14 +149,13 @@ def build_delete_request( return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_update_request( +def build_get_request( resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -172,67 +173,69 @@ def build_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) 
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_list_by_resource_group_request( - resource_group_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +def build_update_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), "resourceGroupName": _SERIALIZER.url( "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if skip is not None: - _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_diagnose_request( +def build_create_or_update_request( resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), @@ -244,7 +247,7 @@ def build_diagnose_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = 
_url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -254,22 +257,23 @@ def build_diagnose_request( _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_list_keys_request( +def build_diagnose_request( resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), @@ -281,30 +285,32 @@ def build_list_keys_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_resync_keys_request( +def build_list_keys_request( resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), @@ -316,7 +322,7 @@ def build_resync_keys_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") @@ -327,49 +333,54 @@ def build_resync_keys_request( return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_list_by_subscription_request( - subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +def build_list_notebook_access_token_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( - "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if skip is not None: - _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_list_notebook_access_token_request( +def build_list_notebook_keys_request( resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), @@ -381,7 +392,7 @@ def build_list_notebook_access_token_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") 
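The regenerated request builders above now default to api-version 2023-08-01-preview. The following is a minimal sketch of constructing the client that these builders serve; the credential helper and subscription ID are placeholders and not part of this diff, and pinning the older stable version is shown only for illustration.

from azure.identity import DefaultAzureCredential  # assumed auth helper; any TokenCredential works
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

# Placeholder subscription ID; the regenerated builders default to api-version 2023-08-01-preview.
client = MachineLearningServicesMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# The previous stable version can still be pinned explicitly if preview behavior is unwanted.
pinned = MachineLearningServicesMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
    api_version="2023-04-01",
)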
@@ -392,19 +403,19 @@ def build_list_notebook_access_token_request( return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_prepare_notebook_request( +def build_list_storage_account_keys_request( resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), @@ -416,7 +427,7 @@ def build_prepare_notebook_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -427,19 +438,19 @@ def build_prepare_notebook_request( return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_list_storage_account_keys_request( +def build_list_outbound_network_dependencies_endpoints_request( resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), @@ -451,7 +462,7 @@ def build_list_storage_account_keys_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -459,22 +470,22 @@ def build_list_storage_account_keys_request( # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_list_notebook_keys_request( +def build_prepare_notebook_request( 
resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), @@ -486,7 +497,7 @@ def build_list_notebook_keys_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -497,19 +508,19 @@ def build_list_notebook_keys_request( return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_list_outbound_network_dependencies_endpoints_request( +def build_resync_keys_request( resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-04-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", - "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys", ) # pylint: disable=line-too-long path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), @@ -521,7 +532,7 @@ def build_list_outbound_network_dependencies_endpoints_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -529,7 +540,7 @@ def build_list_outbound_network_dependencies_endpoints_request( # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) class WorkspacesOperations: @@ -552,19 +563,28 @@ def __init__(self, *args, **kwargs): self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: - """Gets the properties of 
the specified machine learning workspace. + def list_by_subscription( + self, skip: Optional[str] = None, kind: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.Workspace"]: + """Lists all the available machine learning workspaces under the specified subscription. - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str + Lists all the available machine learning workspaces under the specified subscription. + + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param kind: Kind of workspace. Default value is None. + :type kind: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.Workspace + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -573,50 +593,94 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _ } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + def prepare_request(next_link=None): + if not next_link: - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + skip=skip, + kind=kind, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - request = build_get_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.get.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) + def 
extract_data(pipeline_response): + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) - response = pipeline_response.http_response + def get_next(next_link=None): + request = prepare_request(next_link) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response - deserialized = self._deserialize("Workspace", pipeline_response) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if cls: - return cls(pipeline_response, deserialized, {}) + return pipeline_response - return deserialized + return ItemPaged(get_next, extract_data) - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + list_by_subscription.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" } - def _create_or_update_initial( - self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any - ) -> Optional[_models.Workspace]: + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, skip: Optional[str] = None, kind: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.Workspace"]: + """Lists all the available machine learning workspaces under the specified resource group. + + Lists all the available machine learning workspaces under the specified resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param kind: Kind of workspace. Default value is None. 
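The paged list operations shown here now accept an optional kind filter alongside the existing skip continuation token. A minimal usage sketch, reusing the client from the earlier sketch; the resource group name and the kind value are illustrative placeholders.

# client: MachineLearningServicesMgmtClient, constructed as in the earlier sketch.
# Enumerate every workspace in the subscription, optionally filtered by kind.
for ws in client.workspaces.list_by_subscription(kind="Default"):
    print(ws.name)

# The same filter scoped to a single (placeholder) resource group.
for ws in client.workspaces.list_by_resource_group("my-rg", kind="Default"):
    print(ws.name)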
+ :type kind: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -625,30 +689,93 @@ def _create_or_update_initial( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + def prepare_request(next_link=None): + if not next_link: - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) + request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + skip=skip, + kind=kind, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters - else: - _json = self._serialize.body(parameters, "Workspace") + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request - request = build_create_or_update_request( + def extract_data(pipeline_response): + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_by_resource_group.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces" + } + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, force_to_purge: bool = False, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, + force_to_purge=force_to_purge, api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._create_or_update_initial.metadata["url"], + template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) @@ -662,113 +789,33 @@ def _create_or_update_initial( response = pipeline_response.http_response - if response.status_code not in [200, 202]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Workspace", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, None, {}) - _create_or_update_initial.metadata = { + _delete_initial.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } - @overload - def begin_create_or_update( - self, - resource_group_name: str, - workspace_name: str, - parameters: _models.Workspace, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.Workspace]: - """Creates or updates a workspace with the specified parameters. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :param parameters: The parameters for creating or updating a machine learning workspace. - Required. - :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_create_or_update( - self, - resource_group_name: str, - workspace_name: str, - parameters: IO, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> LROPoller[_models.Workspace]: - """Creates or updates a workspace with the specified parameters. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :param parameters: The parameters for creating or updating a machine learning workspace. - Required. - :type parameters: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises ~azure.core.exceptions.HttpResponseError: - """ - @distributed_trace - def begin_create_or_update( - self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any - ) -> LROPoller[_models.Workspace]: - """Creates or updates a workspace with the specified parameters. + def begin_delete( + self, resource_group_name: str, workspace_name: str, force_to_purge: bool = False, **kwargs: Any + ) -> LROPoller[None]: + """Deletes a machine learning workspace. + + Deletes a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameters for creating or updating a machine learning workspace. Is - either a Workspace type or a IO type. Required. - :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str + :param force_to_purge: Flag to indicate delete is a purge request. Default value is False. + :type force_to_purge: bool :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
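begin_delete now exposes a force_to_purge flag (default False). A sketch of a purge-style delete, using placeholder names and the client from the first sketch:

# client: MachineLearningServicesMgmtClient, constructed as in the earlier sketch.
# force_to_purge=True marks the delete as a purge request; result() waits for the LRO.
poller = client.workspaces.begin_delete("my-rg", "my-workspace", force_to_purge=True)
poller.result()  # returns None on success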
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -777,26 +824,24 @@ def begin_create_or_update( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._create_or_update_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, - parameters=parameters, + force_to_purge=force_to_purge, api_version=api_version, - content_type=content_type, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -804,11 +849,9 @@ def begin_create_or_update( ) kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Workspace", pipeline_response) + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized + return cls(pipeline_response, None, {}) if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -825,13 +868,26 @@ def get_long_running_output(pipeline_response): ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = { + begin_delete.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } - def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> None: + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: + """Gets the properties of the specified machine learning workspace. + + Gets the properties of the specified machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Workspace + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -844,14 +900,14 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) - request = build_delete_request( + request = build_get_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], + template_url=self.get.metadata["url"], headers=_headers, params=_params, ) @@ -865,79 +921,19 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response - if response.status_code not in [200, 202, 204]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if cls: - return cls(pipeline_response, None, {}) - - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" - } - - @distributed_trace - def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: - """Deletes a machine learning workspace. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._delete_initial( # type: ignore - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) + deserialized = self._deserialize("Workspace", pipeline_response) - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) + if cls: + return cls(pipeline_response, deserialized, {}) - if polling is True: - polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + return deserialized - begin_delete.metadata = { + get.metadata = { "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } @@ -945,7 +941,7 @@ def _update_initial( self, resource_group_name: str, workspace_name: str, - parameters: Union[_models.WorkspaceUpdateParameters, IO], + body: Union[_models.WorkspaceUpdateParameters, IO], **kwargs: Any ) -> Optional[_models.Workspace]: error_map = { @@ -966,10 +962,10 @@ def _update_initial( content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = self._serialize.body(parameters, "WorkspaceUpdateParameters") + _json = self._serialize.body(body, "WorkspaceUpdateParameters") request = build_update_request( resource_group_name=resource_group_name, @@ -1016,20 +1012,22 @@ def begin_update( self, resource_group_name: str, workspace_name: str, - parameters: _models.WorkspaceUpdateParameters, + body: _models.WorkspaceUpdateParameters, *, content_type: str = "application/json", **kwargs: Any ) -> LROPoller[_models.Workspace]: """Updates a machine learning workspace with the specified parameters. + Updates a machine learning workspace with the specified parameters. + :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameters for updating a machine learning workspace. Required. 
- :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters + :param body: The parameters for updating a machine learning workspace. Required. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1051,20 +1049,22 @@ def begin_update( self, resource_group_name: str, workspace_name: str, - parameters: IO, + body: IO, *, content_type: str = "application/json", **kwargs: Any ) -> LROPoller[_models.Workspace]: """Updates a machine learning workspace with the specified parameters. + Updates a machine learning workspace with the specified parameters. + :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameters for updating a machine learning workspace. Required. - :type parameters: IO + :param body: The parameters for updating a machine learning workspace. Required. + :type body: IO :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -1086,19 +1086,21 @@ def begin_update( self, resource_group_name: str, workspace_name: str, - parameters: Union[_models.WorkspaceUpdateParameters, IO], + body: Union[_models.WorkspaceUpdateParameters, IO], **kwargs: Any ) -> LROPoller[_models.Workspace]: """Updates a machine learning workspace with the specified parameters. + Updates a machine learning workspace with the specified parameters. + :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameters for updating a machine learning workspace. Is either a + :param body: The parameters for updating a machine learning workspace. Is either a WorkspaceUpdateParameters type or a IO type. Required. - :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters or IO + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. :paramtype content_type: str @@ -1127,7 +1129,7 @@ def begin_update( raw_result = self._update_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, - parameters=parameters, + body=body, api_version=api_version, content_type=content_type, cls=lambda x, y, z: x, @@ -1162,103 +1164,237 @@ def get_long_running_output(pipeline_response): "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } - @distributed_trace - def list_by_resource_group( - self, resource_group_name: str, skip: Optional[str] = None, **kwargs: Any - ) -> Iterable["_models.Workspace"]: - """Lists all the available machine learning workspaces under the specified resource group. 
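Because the update payload parameter is renamed from parameters to body, positional callers are unaffected but keyword callers need the new name. A sketch under that assumption; the updated field is chosen purely for illustration.

from azure.mgmt.machinelearningservices import models

# client: MachineLearningServicesMgmtClient, constructed as in the earlier sketch.
update = models.WorkspaceUpdateParameters(tags={"env": "dev"})  # illustrative payload
ws = client.workspaces.begin_update("my-rg", "my-workspace", body=update).result()
print(ws.name)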
+ def _create_or_update_initial( + self, resource_group_name: str, workspace_name: str, body: Union[_models.Workspace, IO], **kwargs: Any + ) -> Optional[_models.Workspace]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Workspace") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("Workspace", pipeline_response) + + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + body: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. + + Creates or updates a workspace with the specified parameters. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str - :param skip: Continuation token for pagination. Default value is None. - :type skip: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: The parameters for creating or updating a machine learning workspace. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Workspace + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either Workspace or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) + Creates or updates a workspace with the specified parameters. - def prepare_request(next_link=None): - if not next_link: + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: The parameters for creating or updating a machine learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ - request = build_list_by_resource_group_request( - resource_group_name=resource_group_name, - subscription_id=self._config.subscription_id, - skip=skip, - api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, body: Union[_models.Workspace, IO], **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + Creates or updates a workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param body: The parameters for creating or updating a machine learning workspace. Is either a + Workspace type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Workspace or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
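begin_create_or_update follows the same parameters-to-body rename and still returns an LROPoller over Workspace. A minimal sketch; the location value, and the idea that a bare location is enough for a usable workspace, are assumptions rather than something this diff prescribes.

from azure.mgmt.machinelearningservices import models

# client: MachineLearningServicesMgmtClient, constructed as in the earlier sketch.
workspace = models.Workspace(location="eastus")  # minimal, illustrative definition
ws = client.workspaces.begin_create_or_update("my-rg", "my-workspace", body=workspace).result()
print(ws.id)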
+ :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) - def extract_data(pipeline_response): - deserialized = self._deserialize("WorkspaceListResult", pipeline_response) - list_of_elem = deserialized.value + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("Workspace", pipeline_response) if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) + return cls(pipeline_response, deserialized, {}) + return deserialized - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces" + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" } def _diagnose_initial( self, resource_group_name: str, workspace_name: str, - parameters: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + body: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, **kwargs: Any ) -> Optional[_models.DiagnoseResponseResult]: error_map = { @@ -1279,11 
+1415,11 @@ def _diagnose_initial( content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IOBase, bytes)): - _content = parameters + if isinstance(body, (IOBase, bytes)): + _content = body else: - if parameters is not None: - _json = self._serialize.body(parameters, "DiagnoseWorkspaceParameters") + if body is not None: + _json = self._serialize.body(body, "DiagnoseWorkspaceParameters") else: _json = None @@ -1337,7 +1473,7 @@ def begin_diagnose( self, resource_group_name: str, workspace_name: str, - parameters: Optional[_models.DiagnoseWorkspaceParameters] = None, + body: Optional[_models.DiagnoseWorkspaceParameters] = None, *, content_type: str = "application/json", **kwargs: Any @@ -1351,8 +1487,8 @@ def begin_diagnose( :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameter of diagnosing workspace health. Default value is None. - :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :param body: The parameter of diagnosing workspace health. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1376,7 +1512,7 @@ def begin_diagnose( self, resource_group_name: str, workspace_name: str, - parameters: Optional[IO] = None, + body: Optional[IO] = None, *, content_type: str = "application/json", **kwargs: Any @@ -1390,8 +1526,8 @@ def begin_diagnose( :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameter of diagnosing workspace health. Default value is None. - :type parameters: IO + :param body: The parameter of diagnosing workspace health. Default value is None. + :type body: IO :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -1415,7 +1551,7 @@ def begin_diagnose( self, resource_group_name: str, workspace_name: str, - parameters: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + body: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, **kwargs: Any ) -> LROPoller[_models.DiagnoseResponseResult]: """Diagnose workspace setup issue. @@ -1427,9 +1563,9 @@ def begin_diagnose( :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param parameters: The parameter of diagnosing workspace health. Is either a + :param body: The parameter of diagnosing workspace health. Is either a DiagnoseWorkspaceParameters type or a IO type. Default value is None. - :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters or IO + :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. 
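The diagnose payload is optional (and now also named body), so the simplest call omits it; passing a models.DiagnoseWorkspaceParameters instance would narrow the checks. Placeholder names again:

# client: MachineLearningServicesMgmtClient, constructed as in the earlier sketch.
result = client.workspaces.begin_diagnose("my-rg", "my-workspace").result()
print(result)
# Optionally scope the checks: begin_diagnose("my-rg", "my-workspace", body=models.DiagnoseWorkspaceParameters(...))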
:paramtype content_type: str @@ -1460,7 +1596,7 @@ def begin_diagnose( raw_result = self._diagnose_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, - parameters=parameters, + body=body, api_version=api_version, content_type=content_type, cls=lambda x, y, z: x, @@ -1504,6 +1640,9 @@ def list_keys( """Lists all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. + Lists all the keys associated with this workspace. This includes keys for the storage account, + app insights and password for container registry. + :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str @@ -1563,9 +1702,24 @@ def list_keys( "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys" } - def _resync_keys_initial( # pylint: disable=inconsistent-return-statements + @distributed_trace + def list_notebook_access_token( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> None: + ) -> _models.NotebookAccessTokenResult: + """Get Azure Machine Learning Workspace notebook access token. + + Get Azure Machine Learning Workspace notebook access token. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NotebookAccessTokenResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult + :raises ~azure.core.exceptions.HttpResponseError: + """ error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1578,14 +1732,14 @@ def _resync_keys_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[_models.NotebookAccessTokenResult] = kwargs.pop("cls", None) - request = build_resync_keys_request( + request = build_list_notebook_access_token_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._resync_keys_initial.metadata["url"], + template_url=self.list_notebook_access_token.metadata["url"], headers=_headers, params=_params, ) @@ -1599,22 +1753,29 @@ def _resync_keys_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response - if response.status_code not in [200, 202]: + if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = self._deserialize("NotebookAccessTokenResult", pipeline_response) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) - _resync_keys_initial.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" + return deserialized + + list_notebook_access_token.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken" } @distributed_trace - def begin_resync_keys(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: - """Resync all the keys associated with this workspace. This includes keys for the storage account, - app insights and password for container registry. + def list_notebook_keys( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListNotebookKeysResult: + """Lists keys of Azure Machine Learning Workspaces notebook. + + Lists keys of Azure Machine Learning Workspaces notebook. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -1622,77 +1783,77 @@ def begin_resync_keys(self, resource_group_name: str, workspace_name: str, **kwa :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] + :return: ListNotebookKeysResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult :raises ~azure.core.exceptions.HttpResponseError: """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._resync_keys_initial( # type: ignore - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) + cls: ClsType[_models.ListNotebookKeysResult] = kwargs.pop("cls", None) - def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements - if cls: - return cls(pipeline_response, None, {}) + request = build_list_notebook_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_notebook_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - if polling is True: - polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) - begin_resync_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ListNotebookKeysResult", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_notebook_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys" } @distributed_trace - def 
list_by_subscription(self, skip: Optional[str] = None, **kwargs: Any) -> Iterable["_models.Workspace"]: - """Lists all the available machine learning workspaces under the specified subscription. + def list_storage_account_keys( + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListStorageAccountKeysResult: + """Lists keys of Azure Machine Learning Workspace's storage account. - :param skip: Continuation token for pagination. Default value is None. - :type skip: str + Lists keys of Azure Machine Learning Workspace's storage account. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either Workspace or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :return: ListStorageAccountKeysResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult :raises ~azure.core.exceptions.HttpResponseError: """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1701,72 +1862,56 @@ def list_by_subscription(self, skip: Optional[str] = None, **kwargs: Any) -> Ite } error_map.update(kwargs.pop("error_map", {}) or {}) - def prepare_request(next_link=None): - if not next_link: + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - request = build_list_by_subscription_request( - subscription_id=self._config.subscription_id, - skip=skip, - api_version=api_version, - template_url=self.list_by_subscription.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListStorageAccountKeysResult] = kwargs.pop("cls", None) - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + request = build_list_storage_account_keys_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_storage_account_keys.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) - def extract_data(pipeline_response): - 
deserialized = self._deserialize("WorkspaceListResult", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) - def get_next(next_link=None): - request = prepare_request(next_link) + response = pipeline_response.http_response - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = self._deserialize("ListStorageAccountKeysResult", pipeline_response) - return pipeline_response + if cls: + return cls(pipeline_response, deserialized, {}) - return ItemPaged(get_next, extract_data) + return deserialized - list_by_subscription.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" + list_storage_account_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys" } @distributed_trace - def list_notebook_access_token( + def list_outbound_network_dependencies_endpoints( self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.NotebookAccessTokenResult: - """return notebook access token and refresh token. + ) -> _models.ExternalFQDNResponse: + """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. + + Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -1774,8 +1919,8 @@ def list_notebook_access_token( :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: NotebookAccessTokenResult or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult + :return: ExternalFQDNResponse or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse :raises ~azure.core.exceptions.HttpResponseError: """ error_map = { @@ -1790,14 +1935,14 @@ def list_notebook_access_token( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.NotebookAccessTokenResult] = kwargs.pop("cls", None) + cls: ClsType[_models.ExternalFQDNResponse] = kwargs.pop("cls", None) - request = build_list_notebook_access_token_request( + request = build_list_outbound_network_dependencies_endpoints_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_notebook_access_token.metadata["url"], + template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"], headers=_headers, params=_params, ) @@ -1816,15 +1961,15 @@ def list_notebook_access_token( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NotebookAccessTokenResult", pipeline_response) + deserialized = self._deserialize("ExternalFQDNResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_notebook_access_token.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken" + list_outbound_network_dependencies_endpoints.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints" } def _prepare_notebook_initial( @@ -1869,11 +2014,16 @@ def _prepare_notebook_initial( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None + response_headers = {} if response.status_code == 200: deserialized = self._deserialize("NotebookResourceInfo", pipeline_response) + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, response_headers) return deserialized @@ -1885,7 +2035,9 @@ def _prepare_notebook_initial( def begin_prepare_notebook( self, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> LROPoller[_models.NotebookResourceInfo]: - """Prepare a notebook. + """Prepare Azure Machine Learning Workspace's notebook resource. + + Prepare Azure Machine Learning Workspace's notebook resource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
@@ -1953,87 +2105,9 @@ def get_long_running_output(pipeline_response): "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook" } - @distributed_trace - def list_storage_account_keys( - self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.ListStorageAccountKeysResult: - """List storage account keys of a workspace. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. - :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListStorageAccountKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListStorageAccountKeysResult] = kwargs.pop("cls", None) - - request = build_list_storage_account_keys_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list_storage_account_keys.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ListStorageAccountKeysResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - list_storage_account_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys" - } - - @distributed_trace - def list_notebook_keys( + def _resync_keys_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.ListNotebookKeysResult: - """List keys of a notebook. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. Required. 
- :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListNotebookKeysResult or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult - :raises ~azure.core.exceptions.HttpResponseError: - """ + ) -> None: error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2046,14 +2120,14 @@ def list_notebook_keys( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ListNotebookKeysResult] = kwargs.pop("cls", None) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_list_notebook_keys_request( + request = build_resync_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_notebook_keys.metadata["url"], + template_url=self._resync_keys_initial.metadata["url"], headers=_headers, params=_params, ) @@ -2067,31 +2141,30 @@ def list_notebook_keys( response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ListNotebookKeysResult", pipeline_response) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, None, response_headers) - list_notebook_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys" + _resync_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" } @distributed_trace - def list_outbound_network_dependencies_endpoints( - self, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> _models.ExternalFQDNResponse: - """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) - programmatically. + def begin_resync_keys(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: + """Resync all the keys associated with this workspace.This includes keys for the storage account, + app insights and password for container registry. - Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) - programmatically. + Resync all the keys associated with this workspace.This includes keys for the storage account, + app insights and password for container registry. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. @@ -2099,55 +2172,58 @@ def list_outbound_network_dependencies_endpoints( :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExternalFQDNResponse or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.ExternalFQDNResponse] = kwargs.pop("cls", None) - - request = build_list_outbound_network_dependencies_endpoints_request( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("ExternalFQDNResponse", pipeline_response) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._resync_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) - if cls: - return cls(pipeline_response, deserialized, {}) + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) - return deserialized + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return 
LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - list_outbound_network_dependencies_endpoints.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints" + begin_resync_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" } diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/aks_compute.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/aks_compute.py index 6dca84bdc7dd..75f966dc7672 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/aks_compute.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/aks_compute.py @@ -46,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/createOrUpdate/AKSCompute.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/createOrUpdate/AKSCompute.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/aml_compute.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/aml_compute.py index efbef7e090e7..e3aa038b10ef 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/aml_compute.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/aml_compute.py @@ -47,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/createOrUpdate/AmlCompute.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/createOrUpdate/AmlCompute.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_aks_compute.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_aks_compute.py index c4f5100e5166..61efdd17e383 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_aks_compute.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_aks_compute.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/createOrUpdate/BasicAKSCompute.json +# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/createOrUpdate/BasicAKSCompute.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_aml_compute.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_aml_compute.py index 4e8789e3ef24..6184a6e6781b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_aml_compute.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_aml_compute.py @@ -55,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/createOrUpdate/BasicAmlCompute.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/createOrUpdate/BasicAmlCompute.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_data_factory_compute.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_data_factory_compute.py index 322dec99f0ea..df362b7e1b00 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_data_factory_compute.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/basic_data_factory_compute.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/createOrUpdate/BasicDataFactoryCompute.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/createOrUpdate/BasicDataFactoryCompute.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance.py index d2e8342bc1ad..12d7ee70a5e6 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance.py @@ -39,30 +39,43 @@ def main(): "computeType": "ComputeInstance", "properties": { "applicationSharingPolicy": "Personal", + "autologgerSettings": {"mlflowAutologger": "Enabled"}, "computeInstanceAuthorizationType": "personal", "customServices": [ { "docker": {"privileged": True}, - "endpoints": [{"name": "connect", "protocol": "http", "published": 8787, "target": 8787}], - "environmentVariables": {"test_variable": {"type": "local", "value": "test_value"}}, - "image": {"reference": "ghcr.io/azure/rocker-rstudio-ml-verse:latest", "type": "docker"}, - "name": "rstudio", + "endpoints": [ + { + "hostIp": None, + "name": "connect", + "protocol": "http", + "published": 4444, + "target": 8787, + } + ], + "environmentVariables": { + "RSP_LICENSE": {"type": 
"local", "value": "XXXX-XXXX-XXXX-XXXX-XXXX-XXXX-XXXX"} + }, + "image": {"reference": "ghcr.io/azure/rstudio-workbench:latest", "type": "docker"}, + "name": "rstudio-workbench", "volumes": [ { - "readOnly": False, - "source": "/home/azureuser/cloudfiles", - "target": "/home/azureuser/cloudfiles", + "readOnly": True, + "source": "/mnt/azureuser/", + "target": "/home/testuser/", "type": "bind", } ], } ], + "enableOSPatching": True, "personalComputeInstanceSettings": { "assignedUser": { "objectId": "00000000-0000-0000-0000-000000000000", "tenantId": "00000000-0000-0000-0000-000000000000", } }, + "releaseQuotaOnStop": True, "sshSettings": {"sshPublicAccess": "Disabled"}, "subnet": {"id": "test-subnet-resource-id"}, "vmSize": "STANDARD_NC6", @@ -73,6 +86,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/createOrUpdate/ComputeInstance.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/createOrUpdate/ComputeInstance.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance_minimal.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance_minimal.py index 575439d32563..8e1ce625749a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance_minimal.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance_minimal.py @@ -41,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/createOrUpdate/ComputeInstanceMinimal.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/createOrUpdate/ComputeInstanceMinimal.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance_with_schedules.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance_with_schedules.py index 4134df53117f..f3ba277f5714 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance_with_schedules.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/compute_instance_with_schedules.py @@ -69,6 +69,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/createOrUpdate/ComputeInstanceWithSchedules.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/createOrUpdate/ComputeInstanceWithSchedules.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/kubernetes_compute.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/kubernetes_compute.py index 07fc5057133b..5c91ec216c2a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/kubernetes_compute.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/create_or_update/kubernetes_compute.py @@ -58,6 +58,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/createOrUpdate/KubernetesCompute.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/createOrUpdate/KubernetesCompute.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/delete.py index 03e8f509fbae..f10741d556ea 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/aks_compute.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/aks_compute.py index 4692f63ff9d7..36382b240d7d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/aks_compute.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/aks_compute.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/get/AKSCompute.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/get/AKSCompute.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/aml_compute.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/aml_compute.py index 59ea7d05e296..5a0fea75859b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/aml_compute.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/aml_compute.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/get/AmlCompute.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/get/AmlCompute.json if __name__ == "__main__": main() diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/compute_instance.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/compute_instance.py index fe66677f7193..f6b8d03562b0 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/compute_instance.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/compute_instance.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/get/ComputeInstance.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/get/ComputeInstance.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/kubernetes_compute.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/kubernetes_compute.py index b09cac4f3f62..1d794562e79a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/kubernetes_compute.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get/kubernetes_compute.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/get/KubernetesCompute.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/get/KubernetesCompute.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get_allowed_vm_sizes_for_resize.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get_allowed_vm_sizes_for_resize.py new file mode 100644 index 000000000000..50a071a75216 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/get_allowed_vm_sizes_for_resize.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get_allowed_vm_sizes_for_resize.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="34adfa4f-cedf-4dc0-ba29-b6d1a69ab345", + ) + + response = client.compute.get_allowed_resize_sizes( + resource_group_name="testrg123", + workspace_name="workspaces123", + compute_name="compute123", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/getAllowedVMSizesForResize.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list.py index 884ceeb73b59..b57d868861f1 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list_keys.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list_keys.py index ed185cc80326..8c1d8f41bd20 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list_keys.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list_keys.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/listKeys.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/listKeys.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list_nodes.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list_nodes.py index 66ee6a0f4bee..cafe7e6f4e2f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list_nodes.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/list_nodes.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/listNodes.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/listNodes.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/patch.py index 344934f7929b..2e7b7196348a 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/patch.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/patch.py @@ -44,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/patch.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/patch.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/resize.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/resize.py new file mode 100644 index 000000000000..ddee93cb2c84 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/resize.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python resize.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="34adfa4f-cedf-4dc0-ba29-b6d1a69ab345", + ) + + client.compute.begin_resize( + resource_group_name="testrg123", + workspace_name="workspaces123", + compute_name="compute123", + parameters={"targetVMSize": "Standard_DS11_v2"}, + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/resize.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/restart.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/restart.py index 6f314eb40ed9..9065d9279953 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/restart.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/restart.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/restart.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/restart.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/start.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/start.py index 9e6d787b739b..bc38beb19301 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/start.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/start.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/start.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/start.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/stop.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/stop.py index 431311991b67..112e0878cf1c 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/stop.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/stop.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Compute/stop.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/stop.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/update_custom_services.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/update_custom_services.py new file mode 100644 index 000000000000..bb6fc86f4dca --- /dev/null +++ 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/update_custom_services.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python update_custom_services.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="34adfa4f-cedf-4dc0-ba29-b6d1a69ab345", + ) + + client.compute.update_custom_services( + resource_group_name="testrg123", + workspace_name="workspaces123", + compute_name="compute123", + custom_services=[ + { + "docker": {"privileged": True}, + "endpoints": [ + {"hostIp": None, "name": "connect", "protocol": "http", "published": 4444, "target": 8787} + ], + "environmentVariables": { + "RSP_LICENSE": {"type": "local", "value": "XXXX-XXXX-XXXX-XXXX-XXXX-XXXX-XXXX"} + }, + "image": {"reference": "ghcr.io/azure/rstudio-workbench:latest", "type": "docker"}, + "name": "rstudio-workbench", + "volumes": [ + {"readOnly": True, "source": "/mnt/azureuser/", "target": "/home/testuser/", "type": "bind"} + ], + } + ], + ) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/updateCustomServices.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/update_idle_shutdown_setting.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/update_idle_shutdown_setting.py new file mode 100644 index 000000000000..7221a16f3baf --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/compute/update_idle_shutdown_setting.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python update_idle_shutdown_setting.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="34adfa4f-cedf-4dc0-ba29-b6d1a69ab345", + ) + + client.compute.update_idle_shutdown_setting( + resource_group_name="testrg123", + workspace_name="workspaces123", + compute_name="compute123", + parameters={"idleTimeBeforeShutdown": "PT120M"}, + ) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Compute/updateIdleShutdownSetting.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_blob_waccount_key/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_blob_waccount_key/create_or_update.py index f51f887bbd51..ef207039c48f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_blob_waccount_key/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_blob_waccount_key/create_or_update.py @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Datastore/AzureBlobWAccountKey/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Datastore/AzureBlobWAccountKey/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_data_lake_gen1_wservice_principal/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_data_lake_gen1_wservice_principal/create_or_update.py index bfbb9e2ecda5..34adce0979d3 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_data_lake_gen1_wservice_principal/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_data_lake_gen1_wservice_principal/create_or_update.py @@ -54,6 +54,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Datastore/AzureDataLakeGen1WServicePrincipal/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Datastore/AzureDataLakeGen1WServicePrincipal/createOrUpdate.json if __name__ == "__main__": main() diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_data_lake_gen2_wservice_principal/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_data_lake_gen2_wservice_principal/create_or_update.py index d221f4673621..32642c927aab 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_data_lake_gen2_wservice_principal/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_data_lake_gen2_wservice_principal/create_or_update.py @@ -57,6 +57,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Datastore/AzureDataLakeGen2WServicePrincipal/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Datastore/AzureDataLakeGen2WServicePrincipal/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_file_waccount_key/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_file_waccount_key/create_or_update.py index 0ae6c4a4789c..a0689522505e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_file_waccount_key/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/azure_file_waccount_key/create_or_update.py @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Datastore/AzureFileWAccountKey/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Datastore/AzureFileWAccountKey/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/delete.py index 78dd652ab719..4eb4b295752b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/delete.py @@ -36,6 +36,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Datastore/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Datastore/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/get.py index 93e17e5a0997..de494d818eff 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Datastore/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Datastore/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/list.py index 4b5de229b6e2..bc5c6698b6d4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Datastore/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Datastore/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/list_secrets.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/list_secrets.py index 74fe51d50b35..fed5f4034f3c 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/list_secrets.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/datastore/list_secrets.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Datastore/listSecrets.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Datastore/listSecrets.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/external_fqdn/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/external_fqdn/get.py index aa82632feffb..f9223214f459 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/external_fqdn/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/external_fqdn/get.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/ExternalFQDN/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/ExternalFQDN/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/feature/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/feature/get.py new file mode 100644 index 000000000000..57aea6f7f25a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/feature/get.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.features.get( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + featureset_name="string", + featureset_version="string", + feature_name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Feature/get.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/feature/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/feature/list.py new file mode 100644 index 000000000000..8780d9705751 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/feature/list.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.features.list( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + featureset_name="string", + featureset_version="string", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Feature/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/create_or_update.py index 47f02ffa0535..dd66c7ceb285 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/create_or_update.py @@ -35,52 +35,59 @@ def main(): id="string", body={ "properties": { + "autologgerSettings": {"mlflowAutologger": "Disabled"}, + "codeId": "string", + "command": "string", + "componentId": "string", "computeId": "string", "description": "string", "displayName": "string", + "distribution": {"distributionType": "TensorFlow", "parameterServerCount": 1, "workerCount": 1}, "environmentId": "string", "environmentVariables": {"string": "string"}, "experimentName": "string", "identity": {"identityType": "AMLToken"}, + "inputs": {"string": {"description": "string", "jobInputType": "literal", "value": "string"}}, "isArchived": False, - "jobType": "AutoML", + "jobType": "Command", + "limits": {"jobLimitsType": "Command", "timeout": "PT5M"}, + "notificationSetting": {"emailOn": ["JobCompleted"], "emails": ["string"]}, "outputs": { "string": { + "assetName": "string", + "assetVersion": "string", "description": "string", "jobOutputType": "uri_file", - "mode": "ReadWriteMount", + "mode": "Direct", "uri": "string", } }, "properties": {"string": "string"}, + "queueSettings": {"jobTier": "Premium", "priority": 1}, "resources": { + "dockerArgs": "string", "instanceCount": 1, "instanceType": "string", - "properties": {"string": {"9bec0ab0-c62f-4fa9-a97c-7b24bbcc90ad": None}}, + "locations": ["string"], + "properties": {"string": {"f69c8d5a-9b39-4183-92d3-a2b18944cf95": None}}, + "shmSize": "2g", }, "services": { "string": { "endpoint": "string", "jobServiceType": "string", + "nodes": {"nodesValueType": "All"}, "port": 1, "properties": {"string": "string"}, } }, "tags": {"string": "string"}, - "taskDetails": { - "limitSettings": {"maxTrials": 2}, - "modelSettings": {"validationCropSize": 2}, - "searchSpace": [{"validationCropSize": "choice(2, 360)"}], - "targetColumnName": "string", - "taskType": "ImageClassification", - "trainingData": {"jobInputType": "mltable", "uri": "string"}, - }, } }, ) print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/AutoMLJob/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/AutoMLJob/createOrUpdate.json if __name__ == "__main__": main() diff 
--git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/get.py index 5744ad654a74..e3fb19c03c3b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/AutoMLJob/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/AutoMLJob/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/list.py index 2b84eaeaa4ea..d4b7d8ef81c4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/auto_ml_job/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/AutoMLJob/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/AutoMLJob/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/cancel.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/cancel.py index 92009c446172..d46d0171f206 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/cancel.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/cancel.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/cancel.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/cancel.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/create_or_update.py index d37efcce8ad5..dfff08d650b3 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/create_or_update.py @@ -35,8 +35,10 @@ def main(): id="string", body={ "properties": { + "autologgerSettings": {"mlflowAutologger": "Disabled"}, "codeId": "string", "command": "string", + "componentId": "string", "computeId": "string", "description": "string", "displayName": "string", @@ -46,26 +48,35 @@ def main(): "experimentName": "string", "identity": {"identityType": "AMLToken"}, "inputs": {"string": {"description": "string", "jobInputType": "literal", "value": "string"}}, + "isArchived": False, "jobType": "Command", "limits": 
{"jobLimitsType": "Command", "timeout": "PT5M"}, + "notificationSetting": {"emailOn": ["JobCancelled"], "emails": ["string"]}, "outputs": { "string": { + "assetName": "string", + "assetVersion": "string", "description": "string", "jobOutputType": "uri_file", - "mode": "ReadWriteMount", + "mode": "Upload", "uri": "string", } }, "properties": {"string": "string"}, + "queueSettings": {"jobTier": "Basic", "priority": 1}, "resources": { + "dockerArgs": "string", "instanceCount": 1, "instanceType": "string", - "properties": {"string": {"e6b6493e-7d5e-4db3-be1e-306ec641327e": None}}, + "locations": ["string"], + "properties": {"string": {"c9ac10d0-915b-4de5-afe8-a4c78a37a558": None}}, + "shmSize": "2g", }, "services": { "string": { "endpoint": "string", "jobServiceType": "string", + "nodes": {"nodesValueType": "All"}, "port": 1, "properties": {"string": "string"}, } @@ -77,6 +88,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/CommandJob/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/CommandJob/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/get.py index da20b17f687c..63730c3b9536 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/CommandJob/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/CommandJob/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/list.py index 603f7bbbc412..b0f48cd01842 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/command_job/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/CommandJob/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/CommandJob/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/delete.py index 80e82b2866eb..e349499f10d9 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/delete.py @@ -32,10 +32,10 @@ def main(): client.jobs.begin_delete( resource_group_name="test-rg", 
workspace_name="my-aml-workspace", - id="http://subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/my-favorite-aml-job", + id="string", ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/create_or_update.py index 8d093b05a24b..b403f1828e35 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/create_or_update.py @@ -61,6 +61,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/PipelineJob/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/PipelineJob/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/get.py index 69dd018de1ad..a3406708611b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/PipelineJob/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/PipelineJob/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/list.py index 2ee9adaf0c1e..d7ea24b6b9fa 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/pipeline_job/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/PipelineJob/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/PipelineJob/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/create_or_update.py index 39198f670f47..bb60ed013152 100644 
--- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/create_or_update.py @@ -35,49 +35,59 @@ def main(): id="string", body={ "properties": { + "autologgerSettings": {"mlflowAutologger": "Disabled"}, + "codeId": "string", + "command": "string", + "componentId": "string", "computeId": "string", "description": "string", "displayName": "string", - "earlyTermination": {"delayEvaluation": 1, "evaluationInterval": 1, "policyType": "MedianStopping"}, + "distribution": {"distributionType": "TensorFlow", "parameterServerCount": 1, "workerCount": 1}, + "environmentId": "string", + "environmentVariables": {"string": "string"}, "experimentName": "string", - "jobType": "Sweep", - "limits": { - "jobLimitsType": "Sweep", - "maxConcurrentTrials": 1, - "maxTotalTrials": 1, - "trialTimeout": "PT1S", + "identity": {"identityType": "AMLToken"}, + "inputs": {"string": {"description": "string", "jobInputType": "literal", "value": "string"}}, + "isArchived": False, + "jobType": "Command", + "limits": {"jobLimitsType": "Command", "timeout": "PT5M"}, + "notificationSetting": {"emailOn": ["JobCompleted"], "emails": ["string"]}, + "outputs": { + "string": { + "assetName": "string", + "assetVersion": "string", + "description": "string", + "jobOutputType": "uri_file", + "mode": "ReadWriteMount", + "uri": "string", + } }, - "objective": {"goal": "Minimize", "primaryMetric": "string"}, "properties": {"string": "string"}, - "samplingAlgorithm": {"samplingAlgorithmType": "Grid"}, - "searchSpace": {"string": {}}, + "queueSettings": {"jobTier": "Basic", "priority": 1}, + "resources": { + "dockerArgs": "string", + "instanceCount": 1, + "instanceType": "string", + "locations": ["string"], + "properties": {"string": {"5fc1f627-491e-45a0-a6a2-f5b4be884911": None}}, + "shmSize": "2g", + }, "services": { "string": { "endpoint": "string", "jobServiceType": "string", + "nodes": {"nodesValueType": "All"}, "port": 1, "properties": {"string": "string"}, } }, "tags": {"string": "string"}, - "trial": { - "codeId": "string", - "command": "string", - "distribution": {"distributionType": "Mpi", "processCountPerInstance": 1}, - "environmentId": "string", - "environmentVariables": {"string": "string"}, - "resources": { - "instanceCount": 1, - "instanceType": "string", - "properties": {"string": {"e6b6493e-7d5e-4db3-be1e-306ec641327e": None}}, - }, - }, } }, ) print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/SweepJob/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/SweepJob/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/get.py index 4c424fedc4e7..e83761df9206 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/SweepJob/get.json 
+# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/SweepJob/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/list.py index 3081f0d1b58d..0c6d6b697c8a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/sweep_job/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Job/SweepJob/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/SweepJob/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/update.py new file mode 100644 index 000000000000..02198ba70d24 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/job/update.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.jobs.update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + id="string", + body={ + "properties": { + "notificationSetting": {"webhooks": {"string": {"eventType": "string", "webhookType": "AzureDevOps"}}} + } + }, + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Job/update.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/create_or_update.py new file mode 100644 index 000000000000..52b91f68834e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/create_or_update.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.labeling_jobs.begin_create_or_update( + resource_group_name="workspace-1234", + workspace_name="testworkspace", + id="testLabelingJob", + body={ + "properties": { + "description": "string", + "jobInstructions": {"uri": "link/to/instructions"}, + "jobType": "Labeling", + "labelCategories": { + "myCategory1": { + "classes": { + "myLabelClass1": {"displayName": "myLabelClass1", "subclasses": {}}, + "myLabelClass2": {"displayName": "myLabelClass2", "subclasses": {}}, + }, + "displayName": "myCategory1Title", + "multiSelect": "Disabled", + }, + "myCategory2": { + "classes": { + "myLabelClass1": {"displayName": "myLabelClass1", "subclasses": {}}, + "myLabelClass2": {"displayName": "myLabelClass2", "subclasses": {}}, + }, + "displayName": "myCategory2Title", + "multiSelect": "Disabled", + }, + }, + "labelingJobMediaProperties": {"mediaType": "Image"}, + "mlAssistConfiguration": { + "inferencingComputeBinding": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/myscoringcompute", + "mlAssist": "Enabled", + "trainingComputeBinding": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/resourceGroup-1234/providers/Microsoft.MachineLearningServices/workspaces/testworkspace/computes/mytrainingompute", + }, + "properties": {"additionalProp1": "string", "additionalProp2": "string", "additionalProp3": "string"}, + "tags": {"additionalProp1": "string", "additionalProp2": "string", "additionalProp3": "string"}, + } + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/LabelingJob/createOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/delete.py new file mode 100644 index 000000000000..9036617d0308 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/delete.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.labeling_jobs.delete( + resource_group_name="workspace-1234", + workspace_name="testworkspace", + id="testLabelingJob", + ) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/LabelingJob/delete.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/export_labels.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/export_labels.py new file mode 100644 index 000000000000..c85820dff0f3 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/export_labels.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python export_labels.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.labeling_jobs.begin_export_labels( + resource_group_name="workspace-1234", + workspace_name="testworkspace", + id="testLabelingJob", + body={"format": "Dataset"}, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/LabelingJob/exportLabels.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/get.py new file mode 100644 index 000000000000..9e3e0f775bdd --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/get.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.labeling_jobs.get( + resource_group_name="workspace-1234", + workspace_name="testworkspace", + id="testLabelingJob", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/LabelingJob/get.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/list.py new file mode 100644 index 000000000000..b3e878b7486f --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/list.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.labeling_jobs.list( + resource_group_name="workspace-1234", + workspace_name="testworkspace", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/LabelingJob/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/pause.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/pause.py new file mode 100644 index 000000000000..33bf2c02dea0 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/pause.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python pause.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.labeling_jobs.pause( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + id="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/LabelingJob/pause.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/resume.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/resume.py new file mode 100644 index 000000000000..d479d8f46ec7 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/labeling_job/resume.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python resume.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.labeling_jobs.begin_resume( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + id="string", + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/LabelingJob/resume.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/create_or_update_rule.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/create_or_update_rule.py new file mode 100644 index 000000000000..fda2c49accd0 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/create_or_update_rule.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update_rule.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.managed_network_settings_rule.begin_create_or_update( + resource_group_name="test-rg", + workspace_name="aml-workspace-name", + rule_name="some_string", + body={ + "properties": {"category": "UserDefined", "destination": "some_string", "status": "Active", "type": "FQDN"} + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/ManagedNetwork/createOrUpdateRule.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/delete_rule.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/delete_rule.py new file mode 100644 index 000000000000..9b7b14632dcd --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/delete_rule.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete_rule.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.managed_network_settings_rule.begin_delete( + resource_group_name="test-rg", + workspace_name="aml-workspace-name", + rule_name="some_string", + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/ManagedNetwork/deleteRule.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/get_rule.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/get_rule.py new file mode 100644 index 000000000000..162a0a928a67 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/get_rule.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get_rule.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.managed_network_settings_rule.get( + resource_group_name="test-rg", + workspace_name="aml-workspace-name", + rule_name="some_string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/ManagedNetwork/getRule.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/list_rule.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/list_rule.py new file mode 100644 index 000000000000..b20475678b91 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/list_rule.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list_rule.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.managed_network_settings_rule.list( + resource_group_name="test-rg", + workspace_name="aml-workspace-name", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/ManagedNetwork/listRule.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/provision.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/provision.py new file mode 100644 index 000000000000..a9e54349ae9d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/managed_network/provision.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python provision.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.managed_network_provisions.begin_provision_managed_network( + resource_group_name="test-rg", + workspace_name="aml-workspace-name", + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/ManagedNetwork/provision.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/notebook/list_keys.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/notebook/list_keys.py index 66a02eb6b469..d0ee6a53441c 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/notebook/list_keys.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/notebook/list_keys.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Notebook/listKeys.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Notebook/listKeys.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/notebook/prepare.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/notebook/prepare.py index 009f750c227c..833b357fe046 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/notebook/prepare.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/notebook/prepare.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Notebook/prepare.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Notebook/prepare.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/get_logs.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/get_logs.py index 2bae655720a4..127c67d7eaaa 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/get_logs.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/get_logs.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/getLogs.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/getLogs.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/create_or_update.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/create_or_update.py index da3956267da7..92f3f3c0c3ab 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/create_or_update.py @@ -74,6 +74,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/KubernetesOnlineDeployment/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/KubernetesOnlineDeployment/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/get.py index bccba8918be1..3189b955c371 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/KubernetesOnlineDeployment/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/KubernetesOnlineDeployment/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/list_skus.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/list_skus.py index 532af7db70b6..f35a4c95131a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/list_skus.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/list_skus.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/KubernetesOnlineDeployment/listSkus.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/KubernetesOnlineDeployment/listSkus.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/update.py index ef4bd08cf870..36995e815171 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/update.py +++ 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/kubernetes_online_deployment/update.py @@ -42,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/KubernetesOnlineDeployment/update.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/KubernetesOnlineDeployment/update.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/list.py index 7df4af070406..5c87e93ef08f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/create_or_update.py index c890a3f55278..a8ba240886dc 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/create_or_update.py @@ -77,6 +77,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/ManagedOnlineDeployment/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/ManagedOnlineDeployment/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/get.py index ccfa935e7f46..e3f2ed6e8074 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/ManagedOnlineDeployment/get.json +# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/ManagedOnlineDeployment/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/list_skus.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/list_skus.py index ef2b493c784f..5c6bf3171c6a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/list_skus.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/list_skus.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/ManagedOnlineDeployment/listSkus.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/ManagedOnlineDeployment/listSkus.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/update.py index e7ec5681f716..1f84a981b6bd 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/online_deployment/managed_online_deployment/update.py @@ -42,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/OnlineDeployment/ManagedOnlineDeployment/update.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/OnlineDeployment/ManagedOnlineDeployment/update.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/create_or_update.py index 4f95d6a5bdef..f8ab73bce63b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/create_or_update.py @@ -33,13 +33,13 @@ def main(): resource_group_name="rg-1234", workspace_name="testworkspace", private_endpoint_connection_name="{privateEndpointConnectionName}", - properties={ + body={ "properties": {"privateLinkServiceConnectionState": {"description": "Auto-Approved", "status": "Approved"}} }, ) print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/PrivateEndpointConnection/createOrUpdate.json +# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/PrivateEndpointConnection/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/delete.py index d071ba7e4df5..327268c384ea 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/delete.py @@ -36,6 +36,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/PrivateEndpointConnection/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/PrivateEndpointConnection/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/get.py index f3a4e42b1c2b..4edf7fdbfbc5 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/PrivateEndpointConnection/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/PrivateEndpointConnection/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/list.py index 8a893d04e41b..911ea50d0b37 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_endpoint_connection/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/PrivateEndpointConnection/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/PrivateEndpointConnection/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_link_resource/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_link_resource/list.py index 71a768ec1c85..15501acf334d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_link_resource/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/private_link_resource/list.py @@ -33,9 +33,10 @@ def main(): 
resource_group_name="rg-1234", workspace_name="testworkspace", ) - print(response) + for item in response: + print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/PrivateLinkResource/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/PrivateLinkResource/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/quota/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/quota/list.py index ba614e993d14..c18d62ce76ad 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/quota/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/quota/list.py @@ -36,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Quota/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Quota/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/quota/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/quota/update.py index d135e3ae3a58..001919ad1e6f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/quota/update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/quota/update.py @@ -51,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Quota/update.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Quota/update.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/create_or_update_system_created.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/create_or_update_system_created.py index 577787fee2b8..379968eeceef 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/create_or_update_system_created.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/create_or_update_system_created.py @@ -91,6 +91,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/createOrUpdate-SystemCreated.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/createOrUpdate-SystemCreated.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/create_or_update_user_created.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/create_or_update_user_created.py index 4fb0f626522f..81599dbf9009 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/create_or_update_user_created.py +++ 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/create_or_update_user_created.py @@ -75,6 +75,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/createOrUpdate-UserCreated.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/createOrUpdate-UserCreated.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/delete.py index 2e4a8a929e44..bcb2da702015 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/delete.py @@ -35,6 +35,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/get_system_created.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/get_system_created.py index fd5b114248b7..9c18c5b460a0 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/get_system_created.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/get_system_created.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/get-SystemCreated.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/get-SystemCreated.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/get_user_created.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/get_user_created.py index bd86983df7e3..51f68a66bf19 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/get_user_created.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/get_user_created.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/get-UserCreated.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/get-UserCreated.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_by_subscription.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_by_subscription.py index d017c80aa223..b0bd1d479342 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_by_subscription.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_by_subscription.py @@ -34,6 +34,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/listBySubscription.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/listBySubscription.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_system_created.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_system_created.py index d9ef5482f599..e8ea6e9325fb 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_system_created.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_system_created.py @@ -36,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/list-SystemCreated.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/list-SystemCreated.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_user_created.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_user_created.py index 193089df5170..26ff0acd9093 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_user_created.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/list_user_created.py @@ -36,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/list-UserCreated.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/list-UserCreated.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/remove_regions.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/remove_regions.py index 75dd8c55aac0..c2ec47e8fc6b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/remove_regions.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/remove_regions.py @@ -93,6 +93,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/removeRegions.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/removeRegions.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/update_system_created.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/update_system_created.py index bda97b4ad6d5..120f1d4271b1 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/update_system_created.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/update_system_created.py @@ -41,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/update-SystemCreated.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/update-SystemCreated.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/update_user_created.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/update_user_created.py index d8ecdb855c20..7ad556364a8c 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/update_user_created.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registries/update_user_created.py @@ -41,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registries/update-UserCreated.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registries/update-UserCreated.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/create_or_update.py index 3b5010d9d181..45bd44e8ae5f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/create_or_update.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeContainer/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/CodeContainer/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/delete.py index b72a96432c9f..34876b3d9009 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeContainer/delete.json +# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/CodeContainer/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/get.py index c8b1ce0c2a7c..8ef30ffc7138 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeContainer/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/CodeContainer/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/list.py index 8f5972265e01..1595d6f8c19c 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeContainer/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/CodeContainer/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/create_or_get_start_pending_upload.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/create_or_get_start_pending_upload.py index e3e974c61eb9..ad7c17f2c381 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/create_or_get_start_pending_upload.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/create_or_get_start_pending_upload.py @@ -34,11 +34,11 @@ def main(): registry_name="registryName", code_name="string", version="string", - body={"pendingUploadId": "string", "pendingUploadType": "TemporaryBlobReference"}, + body={"pendingUploadId": "string", "pendingUploadType": "None"}, ) print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeVersion/createOrGetStartPendingUpload.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/CodeVersion/createOrGetStartPendingUpload.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/create_or_update.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/create_or_update.py index 1df9f062fc28..de90934fc6a7 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/create_or_update.py @@ -47,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeVersion/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/CodeVersion/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/delete.py index 3e012305c57c..c2cc06b029f9 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeVersion/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/CodeVersion/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/get.py index f62658b63374..773726d65cce 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeVersion/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/CodeVersion/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/list.py index 4453204ec59d..8f5deefacdff 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_version/list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeVersion/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/CodeVersion/list.json if __name__ == 
"__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/create_or_update.py index 6c3efd5e6a13..eb87766fee4b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/create_or_update.py @@ -40,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ComponentContainer/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ComponentContainer/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/delete.py index aea7dc0c0dea..ec1aae212da9 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ComponentContainer/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ComponentContainer/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/get.py index 652246d77411..a1faaa03fac4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ComponentContainer/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ComponentContainer/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/list.py index af03b40d6482..9c689c57e875 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_container/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ComponentContainer/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ComponentContainer/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/create_or_update.py index 3ef241cc2c8d..61bc5c372c3b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/create_or_update.py @@ -47,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ComponentVersion/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ComponentVersion/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/delete.py index 24e8ba968dcd..b630c79c1a30 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ComponentVersion/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ComponentVersion/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/get.py index 906686be462b..1a6c3f72ab07 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ComponentVersion/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ComponentVersion/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/list.py index 39d93fd0de0c..65dc5d33aa3f 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/component_version/list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ComponentVersion/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ComponentVersion/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/create_or_update.py index 2619016ab138..da276afaa0d1 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/create_or_update.py @@ -46,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/DataContainer/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/DataContainer/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/delete.py index db9823083c38..c565e15f48b4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/DataContainer/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/DataContainer/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/get.py index cd457746f179..dc066ed0bae6 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/DataContainer/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/DataContainer/get.json if __name__ == "__main__": main() diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/registry_list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/registry_list.py index 1b5917eb34e7..5843417f4c98 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/registry_list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_container/registry_list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/DataContainer/registryList.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/DataContainer/registryList.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/create_or_get_start_pending_upload.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/create_or_get_start_pending_upload.py index 9142baba5fad..6216f1249631 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/create_or_get_start_pending_upload.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/create_or_get_start_pending_upload.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/DataVersionBase/createOrGetStartPendingUpload.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/DataVersionBase/createOrGetStartPendingUpload.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/create_or_update.py index 01da03f03e19..2ebc5bdeff0b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/create_or_update.py @@ -50,6 +50,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/DataVersionBase/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/DataVersionBase/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/delete.py index d8772f5b25d5..b5f05162ad9a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/delete.py +++ 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/DataVersionBase/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/DataVersionBase/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/get.py index fdf5e3b6453f..8aa9f4310937 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/DataVersionBase/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/DataVersionBase/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/registry_list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/registry_list.py index 54d19354ab1b..c67fb2a4ca0f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/registry_list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/data_version_base/registry_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/DataVersionBase/registryList.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/DataVersionBase/registryList.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/create_or_update.py index 931d285c42d8..9855d5ca21bd 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/create_or_update.py @@ -40,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ModelContainer/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelContainer/createOrUpdate.json if __name__ == "__main__": main() diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/delete.py index 098883e9fffe..5b2ab8631453 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ModelContainer/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelContainer/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/get.py index bc742dec3e1b..03176f4afce2 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ModelContainer/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelContainer/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/list.py index 75d05a51b957..e5aa5d0ee2da 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_container/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ModelContainer/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelContainer/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/create_or_get_start_pending_upload.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/create_or_get_start_pending_upload.py index a24b50119143..2a90ba88a083 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/create_or_get_start_pending_upload.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/create_or_get_start_pending_upload.py @@ -34,11 +34,11 @@ def main(): registry_name="registryName", model_name="string", version="string", - body={"pendingUploadId": "string", "pendingUploadType": "TemporaryBlobReference"}, + 
body={"pendingUploadId": "string", "pendingUploadType": "None"}, ) print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ModelVersion/createOrGetStartPendingUpload.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelVersion/createOrGetStartPendingUpload.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/create_or_update.py index a2caeebafdd8..60ae48f12a56 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/create_or_update.py @@ -49,6 +49,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ModelVersion/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelVersion/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/delete.py index 243da0a3dc5e..23f25a9d64c1 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ModelVersion/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelVersion/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/get.py index 3f718017c063..2946b7d0a50c 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ModelVersion/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelVersion/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/list.py 
index ca64f031391d..91c31bff1a77 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/ModelVersion/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelVersion/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/package.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/package.py new file mode 100644 index 000000000000..70160e268ef6 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/model_version/package.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python package.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.registry_model_versions.begin_package( + resource_group_name="test-rg", + registry_name="my-aml-registry", + model_name="string", + version="string", + body={ + "baseEnvironmentSource": {"baseEnvironmentSourceType": "EnvironmentAsset", "resourceId": "string"}, + "environmentVariables": {"string": "string"}, + "inferencingServer": { + "codeConfiguration": {"codeId": "string", "scoringScript": "string"}, + "serverType": "AzureMLBatch", + }, + "inputs": [ + { + "inputType": "UriFile", + "mode": "Download", + "mountPath": "string", + "path": {"inputPathType": "Url", "url": "string"}, + } + ], + "modelConfiguration": {"mode": "ReadOnlyMount", "mountPath": "string"}, + "tags": {"string": "string"}, + "targetEnvironmentId": "string", + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Registry/ModelVersion/package.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/create_or_update.py index 351a1bcdea58..3bbb41834ae4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/create_or_update.py @@ -57,6 +57,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Schedule/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Schedule/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/delete.py index 69da5de82fe1..14d9d1efd2c0 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Schedule/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Schedule/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/get.py index 7772e8800fa9..413a44c3fb0d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# 
x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Schedule/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Schedule/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/list.py index 0610eab1d68d..f15fdebd72d6 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/schedule/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Schedule/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Schedule/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/usage/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/usage/list.py index ccff07e2fa36..c69568cd2c73 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/usage/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/usage/list.py @@ -36,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Usage/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Usage/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/virtual_machine_size/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/virtual_machine_size/list.py index e84b655dbd1e..28d5f3e62900 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/virtual_machine_size/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/virtual_machine_size/list.py @@ -35,6 +35,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/VirtualMachineSize/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/VirtualMachineSize/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/create_or_update.py index 64b1e00ff40c..9d0fe1181c17 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/create_or_update.py @@ -66,6 +66,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchDeployment/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchDeployment/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/delete.py index e4e9b5706e42..f7a68ff86600 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchDeployment/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchDeployment/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/get.py index d1617e809898..666551ec3a06 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchDeployment/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchDeployment/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/list.py index f1242ba35f7e..b161f03b0297 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchDeployment/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchDeployment/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/update.py index 102b3fe0734c..1b8ea83b0cce 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_deployment/update.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchDeployment/update.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchDeployment/update.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/create_or_update.py index 2074040353b4..9c9547d8c250 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/create_or_update.py @@ -50,6 +50,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchEndpoint/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchEndpoint/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/delete.py index 80701cf2af60..7d2f9f165a76 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchEndpoint/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchEndpoint/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/get.py index 990f55858aee..76c48e8ee8ae 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchEndpoint/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchEndpoint/get.json if __name__ == "__main__": main() diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/list.py index e3e3fe7fd1dc..555a91186bab 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchEndpoint/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchEndpoint/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/list_keys.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/list_keys.py index cc96bcdff698..f26ddfd8b99e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/list_keys.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/list_keys.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchEndpoint/listKeys.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchEndpoint/listKeys.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/update.py index f6725836e685..1ed9584aab08 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/batch_endpoint/update.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/BatchEndpoint/update.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/BatchEndpoint/update.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/create_or_update.py index af061e818d47..59543aedeb8f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/create_or_update.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/CodeContainer/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/CodeContainer/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/delete.py index 7da2cb51b90a..f8db39f78db5 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/delete.py @@ -36,6 +36,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/CodeContainer/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/CodeContainer/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/get.py index f56a7a87e660..a8455f5f69b4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/CodeContainer/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/CodeContainer/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/list.py index 001ce2be8571..2ef2f9d1d2e1 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_container/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/CodeContainer/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/CodeContainer/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/create_or_get_start_pending_upload.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/create_or_get_start_pending_upload.py index cea171e25bfb..626c08f0a40f 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/create_or_get_start_pending_upload.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/create_or_get_start_pending_upload.py @@ -34,11 +34,11 @@ def main(): workspace_name="my-aml-workspace", name="string", version="string", - body={"pendingUploadId": "string", "pendingUploadType": "None"}, + body={"pendingUploadId": "string", "pendingUploadType": "TemporaryBlobReference"}, ) print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/CodeVersion/createOrGetStartPendingUpload.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/CodeVersion/createOrGetStartPendingUpload.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/create_or_update.py index b894f4bda5f4..2ae3ae2d3e77 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/create_or_update.py @@ -47,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/CodeVersion/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/CodeVersion/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/delete.py index c78c15a8ed47..d0e90d3aee79 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/delete.py @@ -37,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/CodeVersion/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/CodeVersion/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/get.py index ecae464c7718..efffab4c0560 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/CodeVersion/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/CodeVersion/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/list.py index 52a9742de543..fec690679f16 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/code_version/list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/CodeVersion/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/CodeVersion/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/create_or_update.py index 1c6f64c469e1..6c342242a4d3 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/create_or_update.py @@ -40,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ComponentContainer/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ComponentContainer/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/delete.py index 8826100a38e4..ce5b947b7eca 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/delete.py @@ -36,6 +36,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ComponentContainer/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ComponentContainer/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/get.py index 3954753f23ad..94970b11085b 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ComponentContainer/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ComponentContainer/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/list.py index 45d349e330cd..815dd337b15e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_container/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ComponentContainer/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ComponentContainer/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/create_or_update.py index 724e7b3e919a..322353c15c10 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/create_or_update.py @@ -47,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ComponentVersion/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ComponentVersion/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/delete.py index e3fe021ae631..f7d7825a8434 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/delete.py @@ -37,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ComponentVersion/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ComponentVersion/delete.json if __name__ 
== "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/get.py index 5d23ed45e902..83946cd9bd57 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ComponentVersion/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ComponentVersion/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/list.py index aaecb70ad47d..1cb3d904e86d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/component_version/list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ComponentVersion/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ComponentVersion/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/create.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/create.py index cd04a676e0e1..8d9a315c9756 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/create.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/create.py @@ -32,7 +32,7 @@ def main(): response = client.workspaces.begin_create_or_update( resource_group_name="workspace-1234", workspace_name="testworkspace", - parameters={ + body={ "identity": { "type": "SystemAssigned,UserAssigned", "userAssignedIdentities": { @@ -76,6 +76,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/create.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/create.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/create_or_update.py index 98d366c715e9..8225fd3fe065 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/create_or_update.py @@ 
-45,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/DataContainer/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/DataContainer/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/delete.py index a135c9b5d424..be6869368331 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/delete.py @@ -36,6 +36,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/DataContainer/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/DataContainer/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/get.py index 53ef7b99be97..4819af84f772 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/DataContainer/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/DataContainer/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/list.py index 3d48472b6ceb..c65334176228 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_container/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/DataContainer/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/DataContainer/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/create_or_update.py index 8705b06141a3..d15fce49b3cc 100644 --- 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/create_or_update.py @@ -48,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/DataVersionBase/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/DataVersionBase/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/delete.py index 4b7c673a5232..86628998f174 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/delete.py @@ -37,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/DataVersionBase/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/DataVersionBase/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/get.py index d0e194b55997..ac3fed924f9f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/DataVersionBase/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/DataVersionBase/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/list.py index 72bb1bc4b0e0..0ec5e7b8ac7f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/data_version_base/list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/DataVersionBase/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/DataVersionBase/list.json if __name__ == "__main__": main() diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/delete.py index 205bca6f2e81..8fb3ec7a41d2 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/delete.py @@ -35,6 +35,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/diagnose.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/diagnose.py index 3077375cd454..59f67c9be995 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/diagnose.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/diagnose.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/diagnose.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/diagnose.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/create_or_update.py new file mode 100644 index 000000000000..ea1e5ca62d24 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/create_or_update.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featureset_containers.begin_create_or_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + body={ + "properties": { + "description": "string", + "isArchived": False, + "properties": {"string": "string"}, + "tags": {"string": "string"}, + } + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturesetContainer/createOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/delete.py new file mode 100644 index 000000000000..d71e795f0e67 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/delete.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.featureset_containers.begin_delete( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturesetContainer/delete.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/get_entity.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/get_entity.py new file mode 100644 index 000000000000..a102551024dc --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/get_entity.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get_entity.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featureset_containers.get_entity( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturesetContainer/getEntity.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/list.py new file mode 100644 index 000000000000..149ab6286a11 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_container/list.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featureset_containers.list( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturesetContainer/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/backfill.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/backfill.py new file mode 100644 index 000000000000..d2c0100a7a40 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/backfill.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python backfill.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featureset_versions.begin_backfill( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + version="string", + body={ + "dataAvailabilityStatus": ["None"], + "description": "string", + "displayName": "string", + "featureWindow": { + "featureWindowEnd": "2020-01-01T12:34:56.999+00:51", + "featureWindowStart": "2020-01-01T12:34:56.999+00:51", + }, + "jobId": "string", + "resource": {"instanceType": "string"}, + "sparkConfiguration": {"string": "string"}, + "tags": {"string": "string"}, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturesetVersion/backfill.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/create_or_update.py new file mode 100644 index 000000000000..04837807bd0a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/create_or_update.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featureset_versions.begin_create_or_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + version="string", + body={ + "properties": { + "description": "string", + "entities": ["string"], + "isAnonymous": False, + "isArchived": False, + "materializationSettings": { + "notification": {"emailOn": ["JobFailed"], "emails": ["string"]}, + "resource": {"instanceType": "string"}, + "schedule": { + "endTime": "string", + "frequency": "Day", + "interval": 1, + "schedule": {"hours": [1], "minutes": [1], "monthDays": [1], "weekDays": ["Monday"]}, + "startTime": "string", + "timeZone": "string", + "triggerType": "Recurrence", + }, + "sparkConfiguration": {"string": "string"}, + "storeType": "Online", + }, + "properties": {"string": "string"}, + "specification": {"path": "string"}, + "stage": "string", + "tags": {"string": "string"}, + } + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturesetVersion/createOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/delete.py new file mode 100644 index 000000000000..675ab8510fb5 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/delete.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.featureset_versions.begin_delete( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + version="string", + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturesetVersion/delete.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/get.py new file mode 100644 index 000000000000..982d564710b8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/get.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featureset_versions.get( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + version="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturesetVersion/get.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/list.py new file mode 100644 index 000000000000..2119a6f8e5b6 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featureset_version/list.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featureset_versions.list( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturesetVersion/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/create_or_update.py new file mode 100644 index 000000000000..e34358e8f3be --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/create_or_update.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featurestore_entity_containers.begin_create_or_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + body={ + "properties": { + "description": "string", + "isArchived": False, + "properties": {"string": "string"}, + "tags": {"string": "string"}, + } + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturestoreEntityContainer/createOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/delete.py new file mode 100644 index 000000000000..89c2850c48e8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/delete.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.featurestore_entity_containers.begin_delete( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturestoreEntityContainer/delete.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/get_entity.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/get_entity.py new file mode 100644 index 000000000000..da14088ac564 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/get_entity.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get_entity.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featurestore_entity_containers.get_entity( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturestoreEntityContainer/getEntity.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/list.py new file mode 100644 index 000000000000..bda03debb692 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_container/list.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featurestore_entity_containers.list( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturestoreEntityContainer/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/create_or_update.py new file mode 100644 index 000000000000..725244f9e683 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/create_or_update.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featurestore_entity_versions.begin_create_or_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + version="string", + body={ + "properties": { + "description": "string", + "indexColumns": [{"columnName": "string", "dataType": "Datetime"}], + "isAnonymous": False, + "isArchived": False, + "properties": {"string": "string"}, + "tags": {"string": "string"}, + } + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturestoreEntityVersion/createOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/delete.py new file mode 100644 index 000000000000..76562bb88f1b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/delete.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.featurestore_entity_versions.begin_delete( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + version="string", + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturestoreEntityVersion/delete.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/get.py new file mode 100644 index 000000000000..1a79454327c8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/get.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featurestore_entity_versions.get( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + version="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturestoreEntityVersion/get.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/list.py new file mode 100644 index 000000000000..def5c900c951 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/featurestore_entity_version/list.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.featurestore_entity_versions.list( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/FeaturestoreEntityVersion/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/get.py index 4c2f399080e3..f7d5bd9a711f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/get.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/create_or_update.py new file mode 100644 index 000000000000..0146fb6452ab --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/create_or_update.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_endpoints.begin_create_or_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + group_name="string", + name="string", + body={ + "kind": "string", + "location": "string", + "properties": {"authMode": "AAD", "description": "string", "properties": {"string": "string"}}, + "tags": {}, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceEndpoint/createOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/delete.py new file mode 100644 index 000000000000..9c54603c1ee6 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/delete.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.inference_endpoints.begin_delete( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + group_name="string", + name="string", + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceEndpoint/delete.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/get.py new file mode 100644 index 000000000000..324af4471ab7 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/get.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_endpoints.get( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + group_name="string", + name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceEndpoint/get.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/list.py new file mode 100644 index 000000000000..b6529ac0aec9 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/list.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_endpoints.list( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + group_name="string", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceEndpoint/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/update.py new file mode 100644 index 000000000000..3715899d674b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_endpoint/update.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_endpoints.begin_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + group_name="string", + name="string", + body={"tags": {}}, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceEndpoint/update.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/create_or_update.py new file mode 100644 index 000000000000..8773a79ec80c --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/create_or_update.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_groups.begin_create_or_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + name="string", + body={ + "kind": "string", + "location": "string", + "properties": { + "bonusExtraCapacity": 1, + "data": "string", + "description": "string", + "priority": 1, + "properties": {"string": "string"}, + }, + "sku": {"capacity": 1, "family": "string", "name": "string", "size": "string", "tier": "Basic"}, + "tags": {}, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceGroup/createOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/delete.py new file mode 100644 index 000000000000..adf6809ee241 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/delete.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.inference_groups.begin_delete( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + name="string", + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceGroup/delete.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/get.py new file mode 100644 index 000000000000..4186c9a206f7 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/get.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_groups.get( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceGroup/get.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/get_status.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/get_status.py new file mode 100644 index 000000000000..2e977b81c9c8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/get_status.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get_status.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_groups.get_status( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceGroup/getStatus.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/list.py new file mode 100644 index 000000000000..6c96469e3dc7 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/list.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_groups.list( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceGroup/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/list_skus.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/list_skus.py new file mode 100644 index 000000000000..2b9f0050df0e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/list_skus.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list_skus.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_groups.list_skus( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + name="string", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceGroup/listSkus.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/update.py new file mode 100644 index 000000000000..d37f986ce64f --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_group/update.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_groups.begin_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + pool_name="string", + name="string", + body={ + "sku": {"capacity": 1, "family": "string", "name": "string", "size": "string", "tier": "Premium"}, + "tags": {}, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferenceGroup/update.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/create_or_update.py new file mode 100644 index 000000000000..ea9a27433ba2 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/create_or_update.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_pools.begin_create_or_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + body={ + "identity": {"type": "None", "userAssignedIdentities": {"string": {}}}, + "kind": "string", + "location": "string", + "properties": { + "codeConfiguration": {"codeId": "string", "scoringScript": "string"}, + "description": "string", + "environmentConfiguration": { + "environmentId": "string", + "environmentVariables": {"string": "string"}, + "livenessProbe": { + "failureThreshold": 1, + "initialDelay": "PT5M", + "period": "PT5M", + "successThreshold": 1, + "timeout": "PT5M", + }, + "readinessProbe": { + "failureThreshold": 1, + "initialDelay": "PT5M", + "period": "PT5M", + "successThreshold": 1, + "timeout": "PT5M", + }, + "startupProbe": { + "failureThreshold": 1, + "initialDelay": "PT5M", + "period": "PT5M", + "successThreshold": 1, + "timeout": "PT5M", + }, + }, + "modelConfiguration": {"modelId": "string"}, + "nodeSkuType": "string", + "properties": {"string": "string"}, + "requestConfiguration": {"maxConcurrentRequestsPerInstance": 1, "requestTimeout": "PT5M"}, + }, + "sku": {"capacity": 1, "family": "string", "name": "string", "size": "string", "tier": "Free"}, + "tags": {}, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferencePool/createOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/delete.py new file mode 100644 index 000000000000..380954478ff5 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/delete.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.inference_pools.begin_delete( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferencePool/delete.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/get.py new file mode 100644 index 000000000000..21d42b329bab --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/get.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_pools.get( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferencePool/get.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/get_status.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/get_status.py new file mode 100644 index 000000000000..cedd373d6bc3 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/get_status.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get_status.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_pools.get_status( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferencePool/getStatus.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/list.py new file mode 100644 index 000000000000..77a6ecfb8502 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/list.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_pools.list( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferencePool/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/list_skus.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/list_skus.py new file mode 100644 index 000000000000..3e8323d7955b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/list_skus.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list_skus.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_pools.list_skus( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferencePool/listSkus.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/update.py new file mode 100644 index 000000000000..1fc869ce7275 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/inference_pool/update.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.inference_pools.begin_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + body={ + "identity": {"type": "UserAssigned", "userAssignedIdentities": {"string": {}}}, + "sku": {"capacity": 1, "family": "string", "name": "string", "size": "string", "tier": "Standard"}, + "tags": {}, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/InferencePool/update.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_by_resource_group.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_by_resource_group.py index 48ad1779487f..19e092892b27 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_by_resource_group.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_by_resource_group.py @@ -36,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/listByResourceGroup.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/listByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_by_subscription.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_by_subscription.py index 921bfb2b59f6..850b52f9f736 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_by_subscription.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_by_subscription.py @@ -34,6 +34,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/listBySubscription.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/listBySubscription.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_keys.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_keys.py index 9e0e183e60ef..7e0a2e11beee 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_keys.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_keys.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/listKeys.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/listKeys.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_notebook_access_token.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_notebook_access_token.py index d180f3ee30c2..9bfeb626f21d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_notebook_access_token.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_notebook_access_token.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/listNotebookAccessToken.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/listNotebookAccessToken.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_storage_account_keys.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_storage_account_keys.py index dbc963513d84..d4f3f0fcfa09 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_storage_account_keys.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/list_storage_account_keys.py @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/listStorageAccountKeys.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/listStorageAccountKeys.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/create_or_update.py index 3c3a3ce3aaf6..968e2bbc8bcb 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/create_or_update.py @@ -40,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ModelContainer/createOrUpdate.json +# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ModelContainer/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/delete.py index 7a5a7947d5a4..dc77d70ba40f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/delete.py @@ -36,6 +36,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ModelContainer/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ModelContainer/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/get.py index 9a5d2fcc742a..f47ce1afeca8 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ModelContainer/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ModelContainer/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/list.py index 988fdad83e14..2e1a8dc08add 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_container/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ModelContainer/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ModelContainer/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/create_or_update.py index 0d9996be1fbb..63da78d8188a 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/create_or_update.py @@ -49,6 +49,6 @@ def main(): 
print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ModelVersion/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ModelVersion/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/delete.py index ddf28eee5525..00c293b75c14 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/delete.py @@ -37,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ModelVersion/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ModelVersion/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/get.py index 3322b8491634..bc99f97de710 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ModelVersion/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ModelVersion/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/list.py index b6fe6cc09d99..8e28a98e2038 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/ModelVersion/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ModelVersion/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/package.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/package.py new file mode 100644 index 000000000000..e25a8b42e505 --- /dev/null +++ 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/model_version/package.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python package.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.model_versions.begin_package( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + version="string", + body={ + "baseEnvironmentSource": {"baseEnvironmentSourceType": "EnvironmentAsset", "resourceId": "string"}, + "environmentVariables": {"string": "string"}, + "inferencingServer": { + "codeConfiguration": {"codeId": "string", "scoringScript": "string"}, + "serverType": "AzureMLBatch", + }, + "inputs": [ + { + "inputType": "UriFile", + "mode": "Download", + "mountPath": "string", + "path": {"inputPathType": "Url", "url": "string"}, + } + ], + "modelConfiguration": {"mode": "ReadOnlyMount", "mountPath": "string"}, + "tags": {"string": "string"}, + "targetEnvironmentId": "string", + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ModelVersion/package.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_deployment/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_deployment/delete.py index 209f1dc99b4c..6fe7b2d0ef8b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_deployment/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_deployment/delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/OnlineDeployment/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/OnlineDeployment/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/create_or_update.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/create_or_update.py index 0ad2062a3eef..a95de5ca05b0 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/create_or_update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/create_or_update.py @@ -51,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/OnlineEndpoint/createOrUpdate.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/OnlineEndpoint/createOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/delete.py index c00b39bfcf47..4d6ec5ffcffe 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/OnlineEndpoint/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/OnlineEndpoint/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/get.py index c22efccb2c1e..ab1e1d689b9f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/OnlineEndpoint/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/OnlineEndpoint/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/get_token.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/get_token.py index f721177eb41c..67eed0d75ac5 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/get_token.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/get_token.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/OnlineEndpoint/getToken.json +# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/OnlineEndpoint/getToken.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/list.py index 741567b8ceed..fec612712f56 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/OnlineEndpoint/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/OnlineEndpoint/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/list_keys.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/list_keys.py index 550945711c4b..a9f6937a74c7 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/list_keys.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/list_keys.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/OnlineEndpoint/listKeys.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/OnlineEndpoint/listKeys.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/regenerate_keys.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/regenerate_keys.py index f6f874cb710c..d7817b93f345 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/regenerate_keys.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/regenerate_keys.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/OnlineEndpoint/regenerateKeys.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/OnlineEndpoint/regenerateKeys.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/update.py index b1c25f72b06b..a2a780f5d736 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/update.py +++ 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/online_endpoint/update.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/OnlineEndpoint/update.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/OnlineEndpoint/update.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/operations_list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/operations_list.py index 91510c53d5b5..59db37497ee1 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/operations_list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/operations_list.py @@ -34,6 +34,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/operationsList.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/operationsList.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/resync_keys.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/resync_keys.py index f332a4120926..9610b8503e02 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/resync_keys.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/resync_keys.py @@ -35,6 +35,6 @@ def main(): ).result() -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/resyncKeys.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/resyncKeys.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/create_or_update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/create_or_update.py new file mode 100644 index 000000000000..3cae152f787a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/create_or_update.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python create_or_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.serverless_endpoints.begin_create_or_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + body={ + "identity": {"type": "SystemAssigned", "userAssignedIdentities": {"string": {}}}, + "kind": "string", + "location": "string", + "properties": {"modelProfile": {"modelUri": "string"}}, + "sku": {"capacity": 1, "family": "string", "name": "string", "size": "string", "tier": "Standard"}, + "tags": {}, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ServerlessEndpoint/createOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/delete.py new file mode 100644 index 000000000000..56a8fa2cda23 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/delete.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.serverless_endpoints.begin_delete( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ).result() + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ServerlessEndpoint/delete.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/get.py new file mode 100644 index 000000000000..b217efd37f85 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/get.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.serverless_endpoints.get( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ServerlessEndpoint/get.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/list.py new file mode 100644 index 000000000000..04dfb28cc630 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/list.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.serverless_endpoints.list( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ServerlessEndpoint/list.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/list_keys.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/list_keys.py new file mode 100644 index 000000000000..cb5e400eb01d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/list_keys.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list_keys.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.serverless_endpoints.list_keys( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ServerlessEndpoint/listKeys.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/regenerate_keys.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/regenerate_keys.py new file mode 100644 index 000000000000..4ae7d927dca3 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/regenerate_keys.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python regenerate_keys.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.serverless_endpoints.begin_regenerate_keys( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + body={"keyType": "Primary", "keyValue": "string"}, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ServerlessEndpoint/regenerateKeys.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/update.py new file mode 100644 index 000000000000..aa9d403da79e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/serverless_endpoint/update.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.serverless_endpoints.begin_update( + resource_group_name="test-rg", + workspace_name="my-aml-workspace", + name="string", + body={"tags": {}}, + ).result() + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/ServerlessEndpoint/update.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/update.py index f8b3fae5c4cc..1bb4bc1c0e3e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/update.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace/update.py @@ -32,7 +32,7 @@ def main(): response = client.workspaces.begin_update( resource_group_name="workspace-1234", workspace_name="testworkspace", - parameters={ + body={ "properties": { "description": "new description", "friendlyName": "New friendly name", @@ -43,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Workspace/update.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/Workspace/update.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/create.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/create.py index 110d4862cc3a..00ebc35675c5 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/create.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/create.py @@ -33,11 +33,10 @@ def main(): resource_group_name="resourceGroup-1", workspace_name="workspace-1", connection_name="connection-1", - parameters={"properties": {"authType": "None", "category": "ContainerRegistry", "target": "www.facebook.com"}}, ) print(response) -# x-ms-original-file: 
specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/WorkspaceConnection/create.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/WorkspaceConnection/create.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/delete.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/delete.py index c154fbe498b6..b03762a45fa9 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/delete.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/delete.py @@ -36,6 +36,6 @@ def main(): ) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/WorkspaceConnection/delete.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/WorkspaceConnection/delete.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/get.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/get.py index 98aa005623e0..0da0a48fbe1f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/get.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/WorkspaceConnection/get.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/WorkspaceConnection/get.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/list.py index 3f4299cf1809..575522b2c35d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/WorkspaceConnection/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/WorkspaceConnection/list.json if __name__ == "__main__": main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/list_secrets.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/list_secrets.py new file mode 100644 index 000000000000..36caad8e20b2 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/list_secrets.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python list_secrets.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.workspace_connections.list_secrets( + resource_group_name="test-rg", + workspace_name="aml-workspace-name", + connection_name="some_string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/WorkspaceConnection/listSecrets.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/update.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/update.py new file mode 100644 index 000000000000..024ff2266ce4 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_connection/update.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-machinelearningservices +# USAGE + python update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = MachineLearningServicesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.workspace_connections.update( + resource_group_name="test-rg", + workspace_name="aml-workspace-name", + connection_name="some_string", + ) + print(response) + + +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/WorkspaceConnection/update.json +if __name__ == "__main__": + main() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_feature/list.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_feature/list.py index 3f1b7fa8da64..2b17054878e5 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_feature/list.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/workspace_feature/list.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/WorkspaceFeature/list.json +# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/examples/WorkspaceFeature/list.json if __name__ == "__main__": main()
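
Taken together, the regenerated samples above reflect two surface changes in the 2023-08-01-preview generation: mutating operations now take their request payload through the `body` keyword (previously `parameters`), and `workspace_connections` gains `list_secrets` and `update` operations. The sketch below is not part of the generated samples; it simply combines the calls already shown in the diffs above, with the subscription, resource group, workspace, and description values as placeholder assumptions.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient

# Placeholder identifiers (assumptions); substitute real values before running.
SUBSCRIPTION_ID = "00000000-1111-2222-3333-444444444444"
RESOURCE_GROUP = "test-rg"
WORKSPACE = "aml-workspace-name"

client = MachineLearningServicesMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id=SUBSCRIPTION_ID,
)

# Patch workspace metadata; note the keyword is now `body`, not `parameters`,
# matching the regenerated workspace/update.py sample.
poller = client.workspaces.begin_update(
    resource_group_name=RESOURCE_GROUP,
    workspace_name=WORKSPACE,
    body={"properties": {"description": "new description"}},
)
print(poller.result())

# Enumerate connections, then fetch each one with its secret values populated
# via the new list_secrets operation shown in workspace_connection/list_secrets.py.
for connection in client.workspace_connections.list(
    resource_group_name=RESOURCE_GROUP,
    workspace_name=WORKSPACE,
):
    with_secrets = client.workspace_connections.list_secrets(
        resource_group_name=RESOURCE_GROUP,
        workspace_name=WORKSPACE,
        connection_name=connection.name,
    )
    print(with_secrets)

As in the generated samples, authentication is taken from the AZURE_CLIENT_ID, AZURE_TENANT_ID and AZURE_CLIENT_SECRET environment variables via DefaultAzureCredential.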