From df9c4b91d175e982cf927efee4295a0edf867490 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Fri, 27 Apr 2018 10:13:06 -0700 Subject: [PATCH] [AutoPR batchai/resource-manager] DO NOT MERGE. BatchAI. Specification for new 2018-05-01 API version (#2423) * Generated from 9d2dd94a3fef664168ddda7bc83bb471700e1b23 BatchAI. SDK 2018-05-01 Changes: - Added first class support for Horovod framework - Added first class support for custom mpi commands - Introduced workspaces - Introduced experiments as a grouping mechanism for related jobs - Moved jobs from the top level into workspace/experiment - Moved clusters and file servers from the top level into workspaces - Removing filter and select parameters as they are not supported yet - Removing FileServer type as only NFS is supported - Removed type of output directories - server doesn't support it - Removed 'createNew' attribute of output directory because there is no use-case scenario for having it equal to false. * Generated from ad43bbe835e6a244af9cd39cd0ab6be757e9b42c Addressed CR feedback * Generated from 1ed70938835b3689a0effeb616d3a2c2589d2fc3 BatchAI. 
Get rid of booleans and added readonly attributes to where applicable --- .../batchai/batch_ai_management_client.py | 36 +- .../azure/mgmt/batchai/models/__init__.py | 76 ++- .../mgmt/batchai/models/batch_ai_error.py | 29 +- .../mgmt/batchai/models/batch_ai_error_py3.py | 31 +- .../batch_ai_management_client_enums.py | 29 +- .../azure/mgmt/batchai/models/cluster.py | 11 +- .../azure/mgmt/batchai/models/cluster_py3.py | 13 +- ...clusters_list_by_resource_group_options.py | 10 - ...ters_list_by_resource_group_options_py3.py | 12 +- .../clusters_list_by_workspace_options.py | 29 + .../clusters_list_by_workspace_options_py3.py | 29 + .../batchai/models/clusters_list_options.py | 10 - .../models/clusters_list_options_py3.py | 12 +- .../batchai/models/custom_mpi_settings.py | 41 ++ .../batchai/models/custom_mpi_settings_py3.py | 41 ++ .../azure/mgmt/batchai/models/experiment.py | 61 ++ .../mgmt/batchai/models/experiment_paged.py | 27 + .../mgmt/batchai/models/experiment_py3.py | 61 ++ .../experiments_list_by_workspace_options.py | 29 + ...periments_list_by_workspace_options_py3.py | 29 + .../azure/mgmt/batchai/models/file.py | 45 +- .../azure/mgmt/batchai/models/file_py3.py | 47 +- .../azure/mgmt/batchai/models/file_server.py | 2 +- .../mgmt/batchai/models/file_server_py3.py | 2 +- ..._servers_list_by_resource_group_options.py | 10 - ...vers_list_by_resource_group_options_py3.py | 12 +- .../file_servers_list_by_workspace_options.py | 29 + ...e_servers_list_by_workspace_options_py3.py | 29 + .../models/file_servers_list_options.py | 10 - .../models/file_servers_list_options_py3.py | 12 +- .../mgmt/batchai/models/horovod_settings.py | 50 ++ .../batchai/models/horovod_settings_py3.py | 50 ++ .../azure/mgmt/batchai/models/job.py | 66 +- .../batchai/models/job_create_parameters.py | 34 +- .../models/job_create_parameters_py3.py | 36 +- .../models/job_properties_execution_info.py | 38 +- .../job_properties_execution_info_py3.py | 40 +- .../azure/mgmt/batchai/models/job_py3.py 
| 66 +- .../models/jobs_list_by_experiment_options.py | 29 + .../jobs_list_by_experiment_options_py3.py | 29 + .../batchai/models/key_vault_key_reference.py | 40 -- .../models/key_vault_key_reference_py3.py | 40 -- .../mgmt/batchai/models/local_data_volume.py | 41 -- .../batchai/models/local_data_volume_py3.py | 41 -- .../batchai/models/manual_scale_settings.py | 4 +- .../models/manual_scale_settings_py3.py | 4 +- .../mgmt/batchai/models/mount_settings.py | 5 - .../mgmt/batchai/models/mount_settings_py3.py | 7 +- .../mgmt/batchai/models/node_state_counts.py | 51 +- .../batchai/models/node_state_counts_py3.py | 53 +- .../azure/mgmt/batchai/models/operation.py | 20 +- .../mgmt/batchai/models/operation_display.py | 34 +- .../batchai/models/operation_display_py3.py | 36 +- .../mgmt/batchai/models/operation_py3.py | 22 +- .../mgmt/batchai/models/output_directory.py | 14 - .../batchai/models/output_directory_py3.py | 16 +- .../mgmt/batchai/models/proxy_resource.py | 45 ++ .../mgmt/batchai/models/proxy_resource_py3.py | 45 ++ .../models/remote_login_information.py | 29 +- .../models/remote_login_information_py3.py | 31 +- .../azure/mgmt/batchai/models/setup_task.py | 12 +- .../mgmt/batchai/models/setup_task_py3.py | 14 +- .../azure/mgmt/batchai/models/usage.py | 37 +- .../azure/mgmt/batchai/models/usage_name.py | 20 +- .../mgmt/batchai/models/usage_name_py3.py | 22 +- .../azure/mgmt/batchai/models/usage_py3.py | 39 +- .../azure/mgmt/batchai/models/workspace.py | 69 ++ .../models/workspace_create_parameters.py | 38 ++ .../models/workspace_create_parameters_py3.py | 38 ++ .../mgmt/batchai/models/workspace_paged.py | 27 + .../mgmt/batchai/models/workspace_py3.py | 69 ++ ...kspaces_list_by_resource_group_options.py} | 14 +- ...ces_list_by_resource_group_options_py3.py} | 16 +- ..._options.py => workspaces_list_options.py} | 14 +- ..._py3.py => workspaces_list_options_py3.py} | 16 +- .../azure/mgmt/batchai/operations/__init__.py | 12 +- 
.../batchai/operations/clusters_operations.py | 357 +++++++---- .../operations/experiments_operations.py | 400 ++++++++++++ .../operations/file_servers_operations.py | 333 ++++++---- .../batchai/operations/jobs_operations.py | 593 +++++++++--------- .../mgmt/batchai/operations/operations.py | 4 +- ...age_operations.py => usages_operations.py} | 8 +- .../operations/workspaces_operations.py | 453 +++++++++++++ .../azure/mgmt/batchai/version.py | 2 +- 84 files changed, 3074 insertions(+), 1363 deletions(-) create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_workspace_options.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_workspace_options_py3.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/custom_mpi_settings.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/custom_mpi_settings_py3.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/experiment.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/experiment_paged.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/experiment_py3.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/experiments_list_by_workspace_options.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/experiments_list_by_workspace_options_py3.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_workspace_options.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_workspace_options_py3.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/horovod_settings.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/horovod_settings_py3.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_experiment_options.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_experiment_options_py3.py delete mode 100644 
azure-mgmt-batchai/azure/mgmt/batchai/models/key_vault_key_reference.py delete mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/key_vault_key_reference_py3.py delete mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/local_data_volume.py delete mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/local_data_volume_py3.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/proxy_resource.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/proxy_resource_py3.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/workspace.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_create_parameters.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_create_parameters_py3.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_paged.py create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_py3.py rename azure-mgmt-batchai/azure/mgmt/batchai/models/{jobs_list_by_resource_group_options.py => workspaces_list_by_resource_group_options.py} (61%) rename azure-mgmt-batchai/azure/mgmt/batchai/models/{jobs_list_by_resource_group_options_py3.py => workspaces_list_by_resource_group_options_py3.py} (58%) rename azure-mgmt-batchai/azure/mgmt/batchai/models/{jobs_list_options.py => workspaces_list_options.py} (62%) rename azure-mgmt-batchai/azure/mgmt/batchai/models/{jobs_list_options_py3.py => workspaces_list_options_py3.py} (58%) create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/operations/experiments_operations.py rename azure-mgmt-batchai/azure/mgmt/batchai/operations/{usage_operations.py => usages_operations.py} (96%) create mode 100644 azure-mgmt-batchai/azure/mgmt/batchai/operations/workspaces_operations.py diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/batch_ai_management_client.py b/azure-mgmt-batchai/azure/mgmt/batchai/batch_ai_management_client.py index 73b667f05463..d0a30680b63a 100644 --- 
a/azure-mgmt-batchai/azure/mgmt/batchai/batch_ai_management_client.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/batch_ai_management_client.py @@ -9,15 +9,17 @@ # regenerated. # -------------------------------------------------------------------------- -from msrest.service_client import ServiceClient +from msrest.service_client import SDKClient from msrest import Serializer, Deserializer from msrestazure import AzureConfiguration from .version import VERSION from .operations.operations import Operations -from .operations.usage_operations import UsageOperations +from .operations.usages_operations import UsagesOperations from .operations.clusters_operations import ClustersOperations -from .operations.jobs_operations import JobsOperations from .operations.file_servers_operations import FileServersOperations +from .operations.workspaces_operations import WorkspacesOperations +from .operations.experiments_operations import ExperimentsOperations +from .operations.jobs_operations import JobsOperations from . import models @@ -53,7 +55,7 @@ def __init__( self.subscription_id = subscription_id -class BatchAIManagementClient(object): +class BatchAIManagementClient(SDKClient): """The Azure BatchAI Management API. :ivar config: Configuration for client. 
@@ -61,14 +63,18 @@ class BatchAIManagementClient(object): :ivar operations: Operations operations :vartype operations: azure.mgmt.batchai.operations.Operations - :ivar usage: Usage operations - :vartype usage: azure.mgmt.batchai.operations.UsageOperations + :ivar usages: Usages operations + :vartype usages: azure.mgmt.batchai.operations.UsagesOperations :ivar clusters: Clusters operations :vartype clusters: azure.mgmt.batchai.operations.ClustersOperations - :ivar jobs: Jobs operations - :vartype jobs: azure.mgmt.batchai.operations.JobsOperations :ivar file_servers: FileServers operations :vartype file_servers: azure.mgmt.batchai.operations.FileServersOperations + :ivar workspaces: Workspaces operations + :vartype workspaces: azure.mgmt.batchai.operations.WorkspacesOperations + :ivar experiments: Experiments operations + :vartype experiments: azure.mgmt.batchai.operations.ExperimentsOperations + :ivar jobs: Jobs operations + :vartype jobs: azure.mgmt.batchai.operations.JobsOperations :param credentials: Credentials needed for the client to connect to Azure. 
:type credentials: :mod:`A msrestazure Credentials @@ -82,20 +88,24 @@ def __init__( self, credentials, subscription_id, base_url=None): self.config = BatchAIManagementClientConfiguration(credentials, subscription_id, base_url) - self._client = ServiceClient(self.config.credentials, self.config) + super(BatchAIManagementClient, self).__init__(self.config.credentials, self.config) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self.api_version = '2018-03-01' + self.api_version = '2018-05-01' self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self.operations = Operations( self._client, self.config, self._serialize, self._deserialize) - self.usage = UsageOperations( + self.usages = UsagesOperations( self._client, self.config, self._serialize, self._deserialize) self.clusters = ClustersOperations( self._client, self.config, self._serialize, self._deserialize) - self.jobs = JobsOperations( - self._client, self.config, self._serialize, self._deserialize) self.file_servers = FileServersOperations( self._client, self.config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations( + self._client, self.config, self._serialize, self._deserialize) + self.experiments = ExperimentsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.jobs = JobsOperations( + self._client, self.config, self._serialize, self._deserialize) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/__init__.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/__init__.py index bd21ae6cb5ba..a14013a9adf2 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/__init__.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/__init__.py @@ -19,7 +19,6 @@ from .mount_settings_py3 import MountSettings from .file_server_py3 import FileServer from .key_vault_secret_reference_py3 import KeyVaultSecretReference - from .key_vault_key_reference_py3 import KeyVaultKeyReference from 
.file_server_create_parameters_py3 import FileServerCreateParameters from .manual_scale_settings_py3 import ManualScaleSettings from .auto_scale_settings_py3 import AutoScaleSettings @@ -54,6 +53,8 @@ from .caffe2_settings_py3 import Caffe2Settings from .chainer_settings_py3 import ChainerSettings from .custom_toolkit_settings_py3 import CustomToolkitSettings + from .custom_mpi_settings_py3 import CustomMpiSettings + from .horovod_settings_py3 import HorovodSettings from .job_preparation_py3 import JobPreparation from .input_directory_py3 import InputDirectory from .output_directory_py3 import OutputDirectory @@ -65,16 +66,23 @@ from .remote_login_information_py3 import RemoteLoginInformation from .file_py3 import File from .resource_py3 import Resource - from .local_data_volume_py3 import LocalDataVolume + from .proxy_resource_py3 import ProxyResource from .operation_display_py3 import OperationDisplay from .operation_py3 import Operation + from .workspace_py3 import Workspace + from .workspace_create_parameters_py3 import WorkspaceCreateParameters + from .experiment_py3 import Experiment from .clusters_list_options_py3 import ClustersListOptions from .clusters_list_by_resource_group_options_py3 import ClustersListByResourceGroupOptions - from .jobs_list_options_py3 import JobsListOptions - from .jobs_list_by_resource_group_options_py3 import JobsListByResourceGroupOptions - from .jobs_list_output_files_options_py3 import JobsListOutputFilesOptions + from .clusters_list_by_workspace_options_py3 import ClustersListByWorkspaceOptions from .file_servers_list_options_py3 import FileServersListOptions from .file_servers_list_by_resource_group_options_py3 import FileServersListByResourceGroupOptions + from .file_servers_list_by_workspace_options_py3 import FileServersListByWorkspaceOptions + from .workspaces_list_options_py3 import WorkspacesListOptions + from .workspaces_list_by_resource_group_options_py3 import WorkspacesListByResourceGroupOptions + from 
.experiments_list_by_workspace_options_py3 import ExperimentsListByWorkspaceOptions + from .jobs_list_by_experiment_options_py3 import JobsListByExperimentOptions + from .jobs_list_output_files_options_py3 import JobsListOutputFilesOptions except (SyntaxError, ImportError): from .usage_name import UsageName from .usage import Usage @@ -85,7 +93,6 @@ from .mount_settings import MountSettings from .file_server import FileServer from .key_vault_secret_reference import KeyVaultSecretReference - from .key_vault_key_reference import KeyVaultKeyReference from .file_server_create_parameters import FileServerCreateParameters from .manual_scale_settings import ManualScaleSettings from .auto_scale_settings import AutoScaleSettings @@ -120,6 +127,8 @@ from .caffe2_settings import Caffe2Settings from .chainer_settings import ChainerSettings from .custom_toolkit_settings import CustomToolkitSettings + from .custom_mpi_settings import CustomMpiSettings + from .horovod_settings import HorovodSettings from .job_preparation import JobPreparation from .input_directory import InputDirectory from .output_directory import OutputDirectory @@ -131,35 +140,45 @@ from .remote_login_information import RemoteLoginInformation from .file import File from .resource import Resource - from .local_data_volume import LocalDataVolume + from .proxy_resource import ProxyResource from .operation_display import OperationDisplay from .operation import Operation + from .workspace import Workspace + from .workspace_create_parameters import WorkspaceCreateParameters + from .experiment import Experiment from .clusters_list_options import ClustersListOptions from .clusters_list_by_resource_group_options import ClustersListByResourceGroupOptions - from .jobs_list_options import JobsListOptions - from .jobs_list_by_resource_group_options import JobsListByResourceGroupOptions - from .jobs_list_output_files_options import JobsListOutputFilesOptions + from .clusters_list_by_workspace_options import 
ClustersListByWorkspaceOptions from .file_servers_list_options import FileServersListOptions from .file_servers_list_by_resource_group_options import FileServersListByResourceGroupOptions + from .file_servers_list_by_workspace_options import FileServersListByWorkspaceOptions + from .workspaces_list_options import WorkspacesListOptions + from .workspaces_list_by_resource_group_options import WorkspacesListByResourceGroupOptions + from .experiments_list_by_workspace_options import ExperimentsListByWorkspaceOptions + from .jobs_list_by_experiment_options import JobsListByExperimentOptions + from .jobs_list_output_files_options import JobsListOutputFilesOptions from .operation_paged import OperationPaged from .usage_paged import UsagePaged -from .remote_login_information_paged import RemoteLoginInformationPaged from .cluster_paged import ClusterPaged +from .remote_login_information_paged import RemoteLoginInformationPaged +from .file_server_paged import FileServerPaged +from .workspace_paged import WorkspacePaged +from .experiment_paged import ExperimentPaged from .job_paged import JobPaged from .file_paged import FilePaged -from .file_server_paged import FileServerPaged from .batch_ai_management_client_enums import ( + UsageUnit, CachingType, StorageAccountType, - FileServerType, FileServerProvisioningState, VmPriority, DeallocationOption, ProvisioningState, AllocationState, - OutputType, + JobPriority, ToolType, ExecutionState, + FileType, ) __all__ = [ @@ -172,7 +191,6 @@ 'MountSettings', 'FileServer', 'KeyVaultSecretReference', - 'KeyVaultKeyReference', 'FileServerCreateParameters', 'ManualScaleSettings', 'AutoScaleSettings', @@ -207,6 +225,8 @@ 'Caffe2Settings', 'ChainerSettings', 'CustomToolkitSettings', + 'CustomMpiSettings', + 'HorovodSettings', 'JobPreparation', 'InputDirectory', 'OutputDirectory', @@ -218,32 +238,42 @@ 'RemoteLoginInformation', 'File', 'Resource', - 'LocalDataVolume', + 'ProxyResource', 'OperationDisplay', 'Operation', + 'Workspace', + 
'WorkspaceCreateParameters', + 'Experiment', 'ClustersListOptions', 'ClustersListByResourceGroupOptions', - 'JobsListOptions', - 'JobsListByResourceGroupOptions', - 'JobsListOutputFilesOptions', + 'ClustersListByWorkspaceOptions', 'FileServersListOptions', 'FileServersListByResourceGroupOptions', + 'FileServersListByWorkspaceOptions', + 'WorkspacesListOptions', + 'WorkspacesListByResourceGroupOptions', + 'ExperimentsListByWorkspaceOptions', + 'JobsListByExperimentOptions', + 'JobsListOutputFilesOptions', 'OperationPaged', 'UsagePaged', - 'RemoteLoginInformationPaged', 'ClusterPaged', + 'RemoteLoginInformationPaged', + 'FileServerPaged', + 'WorkspacePaged', + 'ExperimentPaged', 'JobPaged', 'FilePaged', - 'FileServerPaged', + 'UsageUnit', 'CachingType', 'StorageAccountType', - 'FileServerType', 'FileServerProvisioningState', 'VmPriority', 'DeallocationOption', 'ProvisioningState', 'AllocationState', - 'OutputType', + 'JobPriority', 'ToolType', 'ExecutionState', + 'FileType', ] diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_error.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_error.py index a039ffe05739..6848ef698eb8 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_error.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_error.py @@ -15,16 +15,25 @@ class BatchAIError(Model): """An error response from the Batch AI service. - :param code: An identifier for the error. Codes are invariant and are + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar code: An identifier for the error. Codes are invariant and are intended to be consumed programmatically. - :type code: str - :param message: A message describing the error, intended to be suitable - for display in a user interface. - :type message: str - :param details: A list of additional details about the error. 
- :type details: list[~azure.mgmt.batchai.models.NameValuePair] + :vartype code: str + :ivar message: A message describing the error, intended to be suitable for + display in a user interface. + :vartype message: str + :ivar details: A list of additional details about the error. + :vartype details: list[~azure.mgmt.batchai.models.NameValuePair] """ + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'details': {'readonly': True}, + } + _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, @@ -33,6 +42,6 @@ class BatchAIError(Model): def __init__(self, **kwargs): super(BatchAIError, self).__init__(**kwargs) - self.code = kwargs.get('code', None) - self.message = kwargs.get('message', None) - self.details = kwargs.get('details', None) + self.code = None + self.message = None + self.details = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_error_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_error_py3.py index 445ab3a8c083..9b2fff26d7de 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_error_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_error_py3.py @@ -15,24 +15,33 @@ class BatchAIError(Model): """An error response from the Batch AI service. - :param code: An identifier for the error. Codes are invariant and are + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar code: An identifier for the error. Codes are invariant and are intended to be consumed programmatically. - :type code: str - :param message: A message describing the error, intended to be suitable - for display in a user interface. - :type message: str - :param details: A list of additional details about the error. 
- :type details: list[~azure.mgmt.batchai.models.NameValuePair] + :vartype code: str + :ivar message: A message describing the error, intended to be suitable for + display in a user interface. + :vartype message: str + :ivar details: A list of additional details about the error. + :vartype details: list[~azure.mgmt.batchai.models.NameValuePair] """ + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'details': {'readonly': True}, + } + _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'details': {'key': 'details', 'type': '[NameValuePair]'}, } - def __init__(self, *, code: str=None, message: str=None, details=None, **kwargs) -> None: + def __init__(self, **kwargs) -> None: super(BatchAIError, self).__init__(**kwargs) - self.code = code - self.message = message - self.details = details + self.code = None + self.message = None + self.details = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_management_client_enums.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_management_client_enums.py index 07172747786a..5eb2b6a67bfe 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_management_client_enums.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/batch_ai_management_client_enums.py @@ -12,6 +12,11 @@ from enum import Enum +class UsageUnit(str, Enum): + + count = "Count" + + class CachingType(str, Enum): none = "none" @@ -25,12 +30,6 @@ class StorageAccountType(str, Enum): premium_lrs = "Premium_LRS" -class FileServerType(str, Enum): - - nfs = "nfs" - glusterfs = "glusterfs" - - class FileServerProvisioningState(str, Enum): creating = "creating" @@ -51,7 +50,6 @@ class DeallocationOption(str, Enum): requeue = "requeue" terminate = "terminate" waitforjobcompletion = "waitforjobcompletion" - unknown = "unknown" class ProvisioningState(str, Enum): @@ -68,12 +66,11 @@ class AllocationState(str, Enum): resizing = "resizing" -class 
OutputType(str, Enum): +class JobPriority(str, Enum): - model = "model" - logs = "logs" - summary = "summary" - custom = "custom" + low = "low" + normal = "normal" + high = "high" class ToolType(str, Enum): @@ -83,6 +80,8 @@ class ToolType(str, Enum): caffe = "caffe" caffe2 = "caffe2" chainer = "chainer" + horovod = "horovod" + mpi = "mpi" custom = "custom" @@ -93,3 +92,9 @@ class ExecutionState(str, Enum): terminating = "terminating" succeeded = "succeeded" failed = "failed" + + +class FileType(str, Enum): + + file = "file" + directory = "directory" diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/cluster.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/cluster.py index fed384116b6d..a4fb9077b26b 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/cluster.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/cluster.py @@ -81,10 +81,10 @@ class Cluster(Resource): :ivar allocation_state_transition_time: The time at which the cluster entered its current allocation state. :vartype allocation_state_transition_time: datetime - :param errors: Contains details of various errors on the cluster including + :ivar errors: Contains details of various errors on the cluster including resize and node setup task. This element contains all the errors encountered by various compute nodes during node setup. - :type errors: list[~azure.mgmt.batchai.models.BatchAIError] + :vartype errors: list[~azure.mgmt.batchai.models.BatchAIError] :ivar current_node_count: The number of compute nodes currently assigned to the cluster. 
:vartype current_node_count: int @@ -103,6 +103,7 @@ class Cluster(Resource): 'provisioning_state_transition_time': {'readonly': True}, 'allocation_state': {'readonly': True}, 'allocation_state_transition_time': {'readonly': True}, + 'errors': {'readonly': True}, 'current_node_count': {'readonly': True}, 'node_state_counts': {'readonly': True}, } @@ -121,9 +122,9 @@ class Cluster(Resource): 'user_account_settings': {'key': 'properties.userAccountSettings', 'type': 'UserAccountSettings'}, 'subnet': {'key': 'properties.subnet', 'type': 'ResourceId'}, 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, - 'allocation_state': {'key': 'properties.allocationState', 'type': 'AllocationState'}, + 'allocation_state': {'key': 'properties.allocationState', 'type': 'str'}, 'allocation_state_transition_time': {'key': 'properties.allocationStateTransitionTime', 'type': 'iso-8601'}, 'errors': {'key': 'properties.errors', 'type': '[BatchAIError]'}, 'current_node_count': {'key': 'properties.currentNodeCount', 'type': 'int'}, @@ -144,6 +145,6 @@ def __init__(self, **kwargs): self.provisioning_state_transition_time = None self.allocation_state = None self.allocation_state_transition_time = None - self.errors = kwargs.get('errors', None) + self.errors = None self.current_node_count = None self.node_state_counts = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/cluster_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/cluster_py3.py index 1bfa5e556405..0d6a3b6d3f9b 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/cluster_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/cluster_py3.py @@ -81,10 +81,10 @@ class Cluster(Resource): :ivar 
allocation_state_transition_time: The time at which the cluster entered its current allocation state. :vartype allocation_state_transition_time: datetime - :param errors: Contains details of various errors on the cluster including + :ivar errors: Contains details of various errors on the cluster including resize and node setup task. This element contains all the errors encountered by various compute nodes during node setup. - :type errors: list[~azure.mgmt.batchai.models.BatchAIError] + :vartype errors: list[~azure.mgmt.batchai.models.BatchAIError] :ivar current_node_count: The number of compute nodes currently assigned to the cluster. :vartype current_node_count: int @@ -103,6 +103,7 @@ class Cluster(Resource): 'provisioning_state_transition_time': {'readonly': True}, 'allocation_state': {'readonly': True}, 'allocation_state_transition_time': {'readonly': True}, + 'errors': {'readonly': True}, 'current_node_count': {'readonly': True}, 'node_state_counts': {'readonly': True}, } @@ -121,16 +122,16 @@ class Cluster(Resource): 'user_account_settings': {'key': 'properties.userAccountSettings', 'type': 'UserAccountSettings'}, 'subnet': {'key': 'properties.subnet', 'type': 'ResourceId'}, 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, - 'allocation_state': {'key': 'properties.allocationState', 'type': 'AllocationState'}, + 'allocation_state': {'key': 'properties.allocationState', 'type': 'str'}, 'allocation_state_transition_time': {'key': 'properties.allocationStateTransitionTime', 'type': 'iso-8601'}, 'errors': {'key': 'properties.errors', 'type': '[BatchAIError]'}, 'current_node_count': {'key': 'properties.currentNodeCount', 'type': 'int'}, 'node_state_counts': 
{'key': 'properties.nodeStateCounts', 'type': 'NodeStateCounts'}, } - def __init__(self, *, vm_size: str=None, vm_priority="dedicated", scale_settings=None, virtual_machine_configuration=None, node_setup=None, user_account_settings=None, subnet=None, errors=None, **kwargs) -> None: + def __init__(self, *, vm_size: str=None, vm_priority="dedicated", scale_settings=None, virtual_machine_configuration=None, node_setup=None, user_account_settings=None, subnet=None, **kwargs) -> None: super(Cluster, self).__init__(**kwargs) self.vm_size = vm_size self.vm_priority = vm_priority @@ -144,6 +145,6 @@ def __init__(self, *, vm_size: str=None, vm_priority="dedicated", scale_settings self.provisioning_state_transition_time = None self.allocation_state = None self.allocation_state_transition_time = None - self.errors = errors + self.errors = None self.current_node_count = None self.node_state_counts = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_resource_group_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_resource_group_options.py index e59e4b194e75..4a00e118e75b 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_resource_group_options.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_resource_group_options.py @@ -15,25 +15,15 @@ class ClustersListByResourceGroupOptions(Model): """Additional parameters for list_by_resource_group operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . 
:type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } def __init__(self, **kwargs): super(ClustersListByResourceGroupOptions, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.select = kwargs.get('select', None) self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_resource_group_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_resource_group_options_py3.py index 809941f37570..8d410d11b220 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_resource_group_options_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_resource_group_options_py3.py @@ -15,25 +15,15 @@ class ClustersListByResourceGroupOptions(Model): """Additional parameters for list_by_resource_group operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . 
:type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } - def __init__(self, *, filter: str=None, select: str=None, max_results: int=1000, **kwargs) -> None: + def __init__(self, *, max_results: int=1000, **kwargs) -> None: super(ClustersListByResourceGroupOptions, self).__init__(**kwargs) - self.filter = filter - self.select = select self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_workspace_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_workspace_options.py new file mode 100644 index 000000000000..232f29ad677e --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_workspace_options.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ClustersListByWorkspaceOptions(Model): + """Additional parameters for list_by_workspace operation. + + :param max_results: The maximum number of items to return in the response. + A maximum of 1000 files can be returned. Default value: 1000 . 
+ :type max_results: int + """ + + _attribute_map = { + 'max_results': {'key': '', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(ClustersListByWorkspaceOptions, self).__init__(**kwargs) + self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_workspace_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_workspace_options_py3.py new file mode 100644 index 000000000000..12975ebb70a8 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_by_workspace_options_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ClustersListByWorkspaceOptions(Model): + """Additional parameters for list_by_workspace operation. + + :param max_results: The maximum number of items to return in the response. + A maximum of 1000 files can be returned. Default value: 1000 . 
+ :type max_results: int + """ + + _attribute_map = { + 'max_results': {'key': '', 'type': 'int'}, + } + + def __init__(self, *, max_results: int=1000, **kwargs) -> None: + super(ClustersListByWorkspaceOptions, self).__init__(**kwargs) + self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_options.py index 34f107d88e20..20292e1bde85 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_options.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_options.py @@ -15,25 +15,15 @@ class ClustersListOptions(Model): """Additional parameters for list operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . :type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } def __init__(self, **kwargs): super(ClustersListOptions, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.select = kwargs.get('select', None) self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_options_py3.py index c2d207cae1f4..66ab235d44e7 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_options_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/clusters_list_options_py3.py @@ -15,25 +15,15 @@ class ClustersListOptions(Model): """Additional parameters for list operation. 
- :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . :type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } - def __init__(self, *, filter: str=None, select: str=None, max_results: int=1000, **kwargs) -> None: + def __init__(self, *, max_results: int=1000, **kwargs) -> None: super(ClustersListOptions, self).__init__(**kwargs) - self.filter = filter - self.select = select self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/custom_mpi_settings.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/custom_mpi_settings.py new file mode 100644 index 000000000000..ce018fa7a8c4 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/custom_mpi_settings.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CustomMpiSettings(Model): + """Specifies the settings for a custom tool kit job. + + All required parameters must be populated in order to send to Azure. + + :param command_line: Required. The program and program command line + parameters to be executed by mpi runtime. 
+ :type command_line: str + :param process_count: Number of processes parameter that is passed to MPI + runtime. The default value for this property is equal to nodeCount + property + :type process_count: int + """ + + _validation = { + 'command_line': {'required': True}, + } + + _attribute_map = { + 'command_line': {'key': 'commandLine', 'type': 'str'}, + 'process_count': {'key': 'processCount', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(CustomMpiSettings, self).__init__(**kwargs) + self.command_line = kwargs.get('command_line', None) + self.process_count = kwargs.get('process_count', None) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/custom_mpi_settings_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/custom_mpi_settings_py3.py new file mode 100644 index 000000000000..3900faa46076 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/custom_mpi_settings_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CustomMpiSettings(Model): + """Specifies the settings for a custom tool kit job. + + All required parameters must be populated in order to send to Azure. + + :param command_line: Required. The program and program command line + parameters to be executed by mpi runtime. + :type command_line: str + :param process_count: Number of processes parameter that is passed to MPI + runtime. 
The default value for this property is equal to nodeCount + property + :type process_count: int + """ + + _validation = { + 'command_line': {'required': True}, + } + + _attribute_map = { + 'command_line': {'key': 'commandLine', 'type': 'str'}, + 'process_count': {'key': 'processCount', 'type': 'int'}, + } + + def __init__(self, *, command_line: str, process_count: int=None, **kwargs) -> None: + super(CustomMpiSettings, self).__init__(**kwargs) + self.command_line = command_line + self.process_count = process_count diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/experiment.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiment.py new file mode 100644 index 000000000000..f0bbe532e690 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiment.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .proxy_resource import ProxyResource + + +class Experiment(ProxyResource): + """Contains information about the experiment. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The ID of the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar creation_time: Time when the Experiment was created. + :vartype creation_time: datetime + :ivar provisioning_state: The provisioned state of the experiment. 
+ Possible values include: 'creating', 'succeeded', 'failed', 'deleting' + :vartype provisioning_state: str or + ~azure.mgmt.batchai.models.ProvisioningState + :ivar provisioning_state_transition_time: The time at which the experiment + entered its current provisioning state. The time at which the experiment + entered its current provisioning state. + :vartype provisioning_state_transition_time: datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'provisioning_state_transition_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs): + super(Experiment, self).__init__(**kwargs) + self.creation_time = None + self.provisioning_state = None + self.provisioning_state_transition_time = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/experiment_paged.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiment_paged.py new file mode 100644 index 000000000000..3bce26bd1693 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiment_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class ExperimentPaged(Paged): + """ + A paging container for iterating over a list of :class:`Experiment ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[Experiment]'} + } + + def __init__(self, *args, **kwargs): + + super(ExperimentPaged, self).__init__(*args, **kwargs) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/experiment_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiment_py3.py new file mode 100644 index 000000000000..99b7142ba505 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiment_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .proxy_resource import ProxyResource + + +class Experiment(ProxyResource): + """Contains information about the experiment. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The ID of the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. + :vartype type: str + :ivar creation_time: Time when the Experiment was created. + :vartype creation_time: datetime + :ivar provisioning_state: The provisioned state of the experiment. 
+ Possible values include: 'creating', 'succeeded', 'failed', 'deleting' + :vartype provisioning_state: str or + ~azure.mgmt.batchai.models.ProvisioningState + :ivar provisioning_state_transition_time: The time at which the experiment + entered its current provisioning state. The time at which the experiment + entered its current provisioning state. + :vartype provisioning_state_transition_time: datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'provisioning_state_transition_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs) -> None: + super(Experiment, self).__init__(**kwargs) + self.creation_time = None + self.provisioning_state = None + self.provisioning_state_transition_time = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/experiments_list_by_workspace_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiments_list_by_workspace_options.py new file mode 100644 index 000000000000..bc252206fcdb --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiments_list_by_workspace_options.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExperimentsListByWorkspaceOptions(Model): + """Additional parameters for list_by_workspace operation. + + :param max_results: The maximum number of items to return in the response. + A maximum of 1000 files can be returned. Default value: 1000 . + :type max_results: int + """ + + _attribute_map = { + 'max_results': {'key': '', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(ExperimentsListByWorkspaceOptions, self).__init__(**kwargs) + self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/experiments_list_by_workspace_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiments_list_by_workspace_options_py3.py new file mode 100644 index 000000000000..02aa5e0fce9c --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/experiments_list_by_workspace_options_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExperimentsListByWorkspaceOptions(Model): + """Additional parameters for list_by_workspace operation. + + :param max_results: The maximum number of items to return in the response. + A maximum of 1000 files can be returned. Default value: 1000 . 
+ :type max_results: int + """ + + _attribute_map = { + 'max_results': {'key': '', 'type': 'int'}, + } + + def __init__(self, *, max_results: int=1000, **kwargs) -> None: + super(ExperimentsListByWorkspaceOptions, self).__init__(**kwargs) + self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/file.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file.py index b6068ecf0d07..09bdaf9fa0a6 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/file.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file.py @@ -15,30 +15,35 @@ class File(Model): """Properties of the file or directory. - All required parameters must be populated in order to send to Azure. - - :param name: Required. Name of the file. - :type name: str - :param is_directory: Required. Indicates if the file is a directory. - :type is_directory: bool - :param download_url: Will contain an URL to download the corresponding + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: Name of the file. + :vartype name: str + :ivar file_type: Contains information about file type. Possible values + include: 'file', 'directory' + :vartype file_type: str or ~azure.mgmt.batchai.models.FileType + :ivar download_url: Will contain an URL to download the corresponding file. The downloadUrl is not returned for directories. - :type download_url: str - :param last_modified: The time at which the file was last modified. The + :vartype download_url: str + :ivar last_modified: The time at which the file was last modified. The time at which the file was last modified. - :type last_modified: datetime - :param content_length: The file size. The file size. - :type content_length: long + :vartype last_modified: datetime + :ivar content_length: The file size. The file size. 
+ :vartype content_length: long """ _validation = { - 'name': {'required': True}, - 'is_directory': {'required': True}, + 'name': {'readonly': True}, + 'file_type': {'readonly': True}, + 'download_url': {'readonly': True}, + 'last_modified': {'readonly': True}, + 'content_length': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, - 'is_directory': {'key': 'isDirectory', 'type': 'bool'}, + 'file_type': {'key': 'fileType', 'type': 'str'}, 'download_url': {'key': 'downloadUrl', 'type': 'str'}, 'last_modified': {'key': 'properties.lastModified', 'type': 'iso-8601'}, 'content_length': {'key': 'properties.contentLength', 'type': 'long'}, @@ -46,8 +51,8 @@ class File(Model): def __init__(self, **kwargs): super(File, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.is_directory = kwargs.get('is_directory', None) - self.download_url = kwargs.get('download_url', None) - self.last_modified = kwargs.get('last_modified', None) - self.content_length = kwargs.get('content_length', None) + self.name = None + self.file_type = None + self.download_url = None + self.last_modified = None + self.content_length = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_py3.py index 898c80e2ab1f..c23264138a90 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_py3.py @@ -15,39 +15,44 @@ class File(Model): """Properties of the file or directory. - All required parameters must be populated in order to send to Azure. - - :param name: Required. Name of the file. - :type name: str - :param is_directory: Required. Indicates if the file is a directory. - :type is_directory: bool - :param download_url: Will contain an URL to download the corresponding + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: Name of the file. 
+ :vartype name: str + :ivar file_type: Contains information about file type. Possible values + include: 'file', 'directory' + :vartype file_type: str or ~azure.mgmt.batchai.models.FileType + :ivar download_url: Will contain an URL to download the corresponding file. The downloadUrl is not returned for directories. - :type download_url: str - :param last_modified: The time at which the file was last modified. The + :vartype download_url: str + :ivar last_modified: The time at which the file was last modified. The time at which the file was last modified. - :type last_modified: datetime - :param content_length: The file size. The file size. - :type content_length: long + :vartype last_modified: datetime + :ivar content_length: The file size. The file size. + :vartype content_length: long """ _validation = { - 'name': {'required': True}, - 'is_directory': {'required': True}, + 'name': {'readonly': True}, + 'file_type': {'readonly': True}, + 'download_url': {'readonly': True}, + 'last_modified': {'readonly': True}, + 'content_length': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, - 'is_directory': {'key': 'isDirectory', 'type': 'bool'}, + 'file_type': {'key': 'fileType', 'type': 'str'}, 'download_url': {'key': 'downloadUrl', 'type': 'str'}, 'last_modified': {'key': 'properties.lastModified', 'type': 'iso-8601'}, 'content_length': {'key': 'properties.contentLength', 'type': 'long'}, } - def __init__(self, *, name: str, is_directory: bool, download_url: str=None, last_modified=None, content_length: int=None, **kwargs) -> None: + def __init__(self, **kwargs) -> None: super(File, self).__init__(**kwargs) - self.name = name - self.is_directory = is_directory - self.download_url = download_url - self.last_modified = last_modified - self.content_length = content_length + self.name = None + self.file_type = None + self.download_url = None + self.last_modified = None + self.content_length = None diff --git 
a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_server.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_server.py index a78ae04fcab9..2c4d33411b2a 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_server.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_server.py @@ -84,7 +84,7 @@ class FileServer(Resource): 'mount_settings': {'key': 'properties.mountSettings', 'type': 'MountSettings'}, 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'FileServerProvisioningState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_server_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_server_py3.py index 6aae42ce1e25..bf2486163701 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_server_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_server_py3.py @@ -84,7 +84,7 @@ class FileServer(Resource): 'mount_settings': {'key': 'properties.mountSettings', 'type': 'MountSettings'}, 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'FileServerProvisioningState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } def __init__(self, *, vm_size: str=None, ssh_configuration=None, data_disks=None, subnet=None, **kwargs) -> None: diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_resource_group_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_resource_group_options.py index dc9da22b86a3..5dc32ce5a604 100644 
--- a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_resource_group_options.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_resource_group_options.py @@ -15,25 +15,15 @@ class FileServersListByResourceGroupOptions(Model): """Additional parameters for list_by_resource_group operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . :type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } def __init__(self, **kwargs): super(FileServersListByResourceGroupOptions, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.select = kwargs.get('select', None) self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_resource_group_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_resource_group_options_py3.py index fc1946f7b61d..fa3a7628bc0a 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_resource_group_options_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_resource_group_options_py3.py @@ -15,25 +15,15 @@ class FileServersListByResourceGroupOptions(Model): """Additional parameters for list_by_resource_group operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. 
- :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . :type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } - def __init__(self, *, filter: str=None, select: str=None, max_results: int=1000, **kwargs) -> None: + def __init__(self, *, max_results: int=1000, **kwargs) -> None: super(FileServersListByResourceGroupOptions, self).__init__(**kwargs) - self.filter = filter - self.select = select self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_workspace_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_workspace_options.py new file mode 100644 index 000000000000..05a9a42951bf --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_workspace_options.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FileServersListByWorkspaceOptions(Model): + """Additional parameters for list_by_workspace operation. + + :param max_results: The maximum number of items to return in the response. + A maximum of 1000 files can be returned. Default value: 1000 . 
+ :type max_results: int + """ + + _attribute_map = { + 'max_results': {'key': '', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(FileServersListByWorkspaceOptions, self).__init__(**kwargs) + self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_workspace_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_workspace_options_py3.py new file mode 100644 index 000000000000..34a80387a94f --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_by_workspace_options_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FileServersListByWorkspaceOptions(Model): + """Additional parameters for list_by_workspace operation. + + :param max_results: The maximum number of items to return in the response. + A maximum of 1000 files can be returned. Default value: 1000 . 
+ :type max_results: int + """ + + _attribute_map = { + 'max_results': {'key': '', 'type': 'int'}, + } + + def __init__(self, *, max_results: int=1000, **kwargs) -> None: + super(FileServersListByWorkspaceOptions, self).__init__(**kwargs) + self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_options.py index c321c859568e..2a59d29874f4 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_options.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_options.py @@ -15,25 +15,15 @@ class FileServersListOptions(Model): """Additional parameters for list operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . 
:type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } def __init__(self, **kwargs): super(FileServersListOptions, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.select = kwargs.get('select', None) self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_options_py3.py index 8a12532636cf..9d59d047f6f4 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_options_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/file_servers_list_options_py3.py @@ -15,25 +15,15 @@ class FileServersListOptions(Model): """Additional parameters for list operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . 
:type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } - def __init__(self, *, filter: str=None, select: str=None, max_results: int=1000, **kwargs) -> None: + def __init__(self, *, max_results: int=1000, **kwargs) -> None: super(FileServersListOptions, self).__init__(**kwargs) - self.filter = filter - self.select = select self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/horovod_settings.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/horovod_settings.py new file mode 100644 index 000000000000..cadc3076625d --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/horovod_settings.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class HorovodSettings(Model): + """Specifies the settings for Horovod job. + + All required parameters must be populated in order to send to Azure. + + :param python_script_file_path: Required. The path and file name of the + python script to execute the job. + :type python_script_file_path: str + :param python_interpreter_path: The path to python interpreter. + :type python_interpreter_path: str + :param command_line_args: Command line arguments that needs to be passed + to the python script. + :type command_line_args: str + :param process_count: Number of processes parameter that is passed to MPI + runtime. 
The default value for this property is equal to nodeCount + property + :type process_count: int + """ + + _validation = { + 'python_script_file_path': {'required': True}, + } + + _attribute_map = { + 'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'}, + 'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'}, + 'command_line_args': {'key': 'commandLineArgs', 'type': 'str'}, + 'process_count': {'key': 'processCount', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(HorovodSettings, self).__init__(**kwargs) + self.python_script_file_path = kwargs.get('python_script_file_path', None) + self.python_interpreter_path = kwargs.get('python_interpreter_path', None) + self.command_line_args = kwargs.get('command_line_args', None) + self.process_count = kwargs.get('process_count', None) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/horovod_settings_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/horovod_settings_py3.py new file mode 100644 index 000000000000..ca62be37f128 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/horovod_settings_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class HorovodSettings(Model): + """Specifies the settings for Horovod job. + + All required parameters must be populated in order to send to Azure. + + :param python_script_file_path: Required. The path and file name of the + python script to execute the job. 
+ :type python_script_file_path: str + :param python_interpreter_path: The path to python interpreter. + :type python_interpreter_path: str + :param command_line_args: Command line arguments that needs to be passed + to the python script. + :type command_line_args: str + :param process_count: Number of processes parameter that is passed to MPI + runtime. The default value for this property is equal to nodeCount + property + :type process_count: int + """ + + _validation = { + 'python_script_file_path': {'required': True}, + } + + _attribute_map = { + 'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'}, + 'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'}, + 'command_line_args': {'key': 'commandLineArgs', 'type': 'str'}, + 'process_count': {'key': 'processCount', 'type': 'int'}, + } + + def __init__(self, *, python_script_file_path: str, python_interpreter_path: str=None, command_line_args: str=None, process_count: int=None, **kwargs) -> None: + super(HorovodSettings, self).__init__(**kwargs) + self.python_script_file_path = python_script_file_path + self.python_interpreter_path = python_interpreter_path + self.command_line_args = command_line_args + self.process_count = process_count diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/job.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/job.py index 6be2033e14a0..3bcc83908d51 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/job.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/job.py @@ -9,32 +9,25 @@ # regenerated. # -------------------------------------------------------------------------- -from .resource import Resource +from .proxy_resource import ProxyResource -class Job(Resource): - """Contains information about the job. +class Job(ProxyResource): + """Contains information about a Job. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The ID of the resource + :ivar id: The ID of the resource. 
:vartype id: str - :ivar name: The name of the resource + :ivar name: The name of the resource. :vartype name: str - :ivar type: The type of the resource + :ivar type: The type of the resource. :vartype type: str - :ivar location: The location of the resource - :vartype location: str - :ivar tags: The tags of the resource - :vartype tags: dict[str, str] - :param experiment_name: Describe the experiment information of the job - :type experiment_name: str :param priority: Priority associated with the job. Priority associated - with the job. Priority values can range from -1000 to 1000, with -1000 - being the lowest priority and 1000 being the highest priority. The default - value is 0. Default value: 0 . - :type priority: int + with the job. Possible values include: 'low', 'normal', 'high'. Default + value: "normal" . + :type priority: str or ~azure.mgmt.batchai.models.JobPriority :param cluster: Specifies the Id of the cluster on which this job will run. :type cluster: ~azure.mgmt.batchai.models.ResourceId @@ -43,13 +36,13 @@ class Job(Resource): unmouted after the job completion. The volumes will be mounted at location specified by $AZ_BATCHAI_JOB_MOUNT_ROOT environment variable. :type mount_volumes: ~azure.mgmt.batchai.models.MountVolumes - :param job_output_directory_path_segment: A segment of job's output + :ivar job_output_directory_path_segment: A segment of job's output directories path created by BatchAI. Batch AI creates job's output directories under an unique path to avoid conflicts between jobs. This value contains a path segment generated by Batch AI to make the path unique and can be used to find the output directory on the node or mounted filesystem. - :type job_output_directory_path_segment: str + :vartype job_output_directory_path_segment: str :param node_count: Number of compute nodes to run the job on. The job will be gang scheduled on that many compute nodes :type node_count: int @@ -59,8 +52,9 @@ class Job(Resource): on the VM. 
:type container_settings: ~azure.mgmt.batchai.models.ContainerSettings :param tool_type: The toolkit type of this job. Possible values are: cntk, - tensorflow, caffe, caffe2, chainer, pytorch, custom. Possible values - include: 'cntk', 'tensorflow', 'caffe', 'caffe2', 'chainer', 'custom' + tensorflow, caffe, caffe2, chainer, pytorch, custom, mpi, horovod. + Possible values include: 'cntk', 'tensorflow', 'caffe', 'caffe2', + 'chainer', 'horovod', 'mpi', 'custom' :type tool_type: str or ~azure.mgmt.batchai.models.ToolType :param cntk_settings: Specifies the settings for CNTK (aka Microsoft Cognitive Toolkit) job. @@ -77,6 +71,10 @@ class Job(Resource): job. :type custom_toolkit_settings: ~azure.mgmt.batchai.models.CustomToolkitSettings + :param custom_mpi_settings: Specifies the settings for custom MPI job. + :type custom_mpi_settings: ~azure.mgmt.batchai.models.CustomMpiSettings + :param horovod_settings: Specifies the settings for Horovod job. + :type horovod_settings: ~azure.mgmt.batchai.models.HorovodSettings :param job_preparation: Specifies the actions to be performed before tool kit is launched. The specified actions will run on all the nodes that are part of the job @@ -112,7 +110,7 @@ class Job(Resource): entered its current provisioning state. The time at which the job entered its current provisioning state. :vartype provisioning_state_transition_time: datetime - :param execution_state: The current state of the job. The current state of + :ivar execution_state: The current state of the job. The current state of the job. Possible values are: queued - The job is queued and able to run. A job enters this state when it is created, or when it is awaiting a retry after a failed run. running - The job is running on a compute cluster. @@ -125,7 +123,7 @@ class Job(Resource): code) and has exhausted its retry limit. A job is also marked as failed if an error occurred launching the job. 
Possible values include: 'queued', 'running', 'terminating', 'succeeded', 'failed' - :type execution_state: str or ~azure.mgmt.batchai.models.ExecutionState + :vartype execution_state: str or ~azure.mgmt.batchai.models.ExecutionState :ivar execution_state_transition_time: The time at which the job entered its current execution state. The time at which the job entered its current execution state. @@ -140,11 +138,11 @@ class Job(Resource): 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'location': {'readonly': True}, - 'tags': {'readonly': True}, + 'job_output_directory_path_segment': {'readonly': True}, 'creation_time': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'provisioning_state_transition_time': {'readonly': True}, + 'execution_state': {'readonly': True}, 'execution_state_transition_time': {'readonly': True}, } @@ -152,10 +150,7 @@ class Job(Resource): 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'experiment_name': {'key': 'properties.experimentName', 'type': 'str'}, - 'priority': {'key': 'properties.priority', 'type': 'int'}, + 'priority': {'key': 'properties.priority', 'type': 'str'}, 'cluster': {'key': 'properties.cluster', 'type': 'ResourceId'}, 'mount_volumes': {'key': 'properties.mountVolumes', 'type': 'MountVolumes'}, 'job_output_directory_path_segment': {'key': 'properties.jobOutputDirectoryPathSegment', 'type': 'str'}, @@ -168,6 +163,8 @@ class Job(Resource): 'caffe_settings': {'key': 'properties.caffeSettings', 'type': 'CaffeSettings'}, 'chainer_settings': {'key': 'properties.chainerSettings', 'type': 'ChainerSettings'}, 'custom_toolkit_settings': {'key': 'properties.customToolkitSettings', 'type': 'CustomToolkitSettings'}, + 'custom_mpi_settings': {'key': 'properties.customMpiSettings', 'type': 'CustomMpiSettings'}, + 
'horovod_settings': {'key': 'properties.horovodSettings', 'type': 'HorovodSettings'}, 'job_preparation': {'key': 'properties.jobPreparation', 'type': 'JobPreparation'}, 'std_out_err_path_prefix': {'key': 'properties.stdOutErrPathPrefix', 'type': 'str'}, 'input_directories': {'key': 'properties.inputDirectories', 'type': '[InputDirectory]'}, @@ -176,20 +173,19 @@ class Job(Resource): 'secrets': {'key': 'properties.secrets', 'type': '[EnvironmentVariableWithSecretValue]'}, 'constraints': {'key': 'properties.constraints', 'type': 'JobPropertiesConstraints'}, 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, - 'execution_state': {'key': 'properties.executionState', 'type': 'ExecutionState'}, + 'execution_state': {'key': 'properties.executionState', 'type': 'str'}, 'execution_state_transition_time': {'key': 'properties.executionStateTransitionTime', 'type': 'iso-8601'}, 'execution_info': {'key': 'properties.executionInfo', 'type': 'JobPropertiesExecutionInfo'}, } def __init__(self, **kwargs): super(Job, self).__init__(**kwargs) - self.experiment_name = kwargs.get('experiment_name', None) - self.priority = kwargs.get('priority', 0) + self.priority = kwargs.get('priority', "normal") self.cluster = kwargs.get('cluster', None) self.mount_volumes = kwargs.get('mount_volumes', None) - self.job_output_directory_path_segment = kwargs.get('job_output_directory_path_segment', None) + self.job_output_directory_path_segment = None self.node_count = kwargs.get('node_count', None) self.container_settings = kwargs.get('container_settings', None) self.tool_type = kwargs.get('tool_type', None) @@ -199,6 +195,8 @@ def __init__(self, **kwargs): self.caffe_settings = 
kwargs.get('caffe_settings', None) self.chainer_settings = kwargs.get('chainer_settings', None) self.custom_toolkit_settings = kwargs.get('custom_toolkit_settings', None) + self.custom_mpi_settings = kwargs.get('custom_mpi_settings', None) + self.horovod_settings = kwargs.get('horovod_settings', None) self.job_preparation = kwargs.get('job_preparation', None) self.std_out_err_path_prefix = kwargs.get('std_out_err_path_prefix', None) self.input_directories = kwargs.get('input_directories', None) @@ -209,6 +207,6 @@ def __init__(self, **kwargs): self.creation_time = None self.provisioning_state = None self.provisioning_state_transition_time = None - self.execution_state = kwargs.get('execution_state', None) + self.execution_state = None self.execution_state_transition_time = None self.execution_info = kwargs.get('execution_info', None) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_create_parameters.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_create_parameters.py index a82ebadf606f..f5b1d8e6c987 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_create_parameters.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_create_parameters.py @@ -17,17 +17,10 @@ class JobCreateParameters(Model): All required parameters must be populated in order to send to Azure. - :param location: Required. The region in which to create the job. - :type location: str - :param tags: The user specified tags associated with the job. - :type tags: dict[str, str] - :param experiment_name: Describe the experiment information of the job - :type experiment_name: str - :param priority: Priority associated with the job. Priority associated - with the job. Priority values can range from -1000 to 1000, with -1000 - being the lowest priority and 1000 being the highest priority. The default - value is 0. Default value: 0 . - :type priority: int + :param scheduling_priority: Scheduling priority associated with the job. 
+ Scheduling priority associated with the job. Possible values include: + 'low', 'normal', 'high'. Default value: "normal" . + :type scheduling_priority: str or ~azure.mgmt.batchai.models.JobPriority :param cluster: Required. Specifies the Id of the cluster on which this job will run. :type cluster: ~azure.mgmt.batchai.models.ResourceId @@ -61,6 +54,10 @@ class JobCreateParameters(Model): job. :type custom_toolkit_settings: ~azure.mgmt.batchai.models.CustomToolkitSettings + :param custom_mpi_settings: Specifies the settings for custom MPI job. + :type custom_mpi_settings: ~azure.mgmt.batchai.models.CustomMpiSettings + :param horovod_settings: Specifies the settings for Horovod job. + :type horovod_settings: ~azure.mgmt.batchai.models.HorovodSettings :param job_preparation: Specifies the command line to be executed before tool kit is launched. The specified actions will run on all the nodes that are part of the job @@ -88,17 +85,13 @@ class JobCreateParameters(Model): """ _validation = { - 'location': {'required': True}, 'cluster': {'required': True}, 'node_count': {'required': True}, 'std_out_err_path_prefix': {'required': True}, } _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'experiment_name': {'key': 'properties.experimentName', 'type': 'str'}, - 'priority': {'key': 'properties.priority', 'type': 'int'}, + 'scheduling_priority': {'key': 'properties.schedulingPriority', 'type': 'str'}, 'cluster': {'key': 'properties.cluster', 'type': 'ResourceId'}, 'mount_volumes': {'key': 'properties.mountVolumes', 'type': 'MountVolumes'}, 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, @@ -110,6 +103,8 @@ class JobCreateParameters(Model): 'caffe2_settings': {'key': 'properties.caffe2Settings', 'type': 'Caffe2Settings'}, 'chainer_settings': {'key': 'properties.chainerSettings', 'type': 'ChainerSettings'}, 'custom_toolkit_settings': {'key': 'properties.customToolkitSettings', 'type': 
'CustomToolkitSettings'}, + 'custom_mpi_settings': {'key': 'properties.customMpiSettings', 'type': 'CustomMpiSettings'}, + 'horovod_settings': {'key': 'properties.horovodSettings', 'type': 'HorovodSettings'}, 'job_preparation': {'key': 'properties.jobPreparation', 'type': 'JobPreparation'}, 'std_out_err_path_prefix': {'key': 'properties.stdOutErrPathPrefix', 'type': 'str'}, 'input_directories': {'key': 'properties.inputDirectories', 'type': '[InputDirectory]'}, @@ -121,10 +116,7 @@ class JobCreateParameters(Model): def __init__(self, **kwargs): super(JobCreateParameters, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) - self.experiment_name = kwargs.get('experiment_name', None) - self.priority = kwargs.get('priority', 0) + self.scheduling_priority = kwargs.get('scheduling_priority', "normal") self.cluster = kwargs.get('cluster', None) self.mount_volumes = kwargs.get('mount_volumes', None) self.node_count = kwargs.get('node_count', None) @@ -136,6 +128,8 @@ def __init__(self, **kwargs): self.caffe2_settings = kwargs.get('caffe2_settings', None) self.chainer_settings = kwargs.get('chainer_settings', None) self.custom_toolkit_settings = kwargs.get('custom_toolkit_settings', None) + self.custom_mpi_settings = kwargs.get('custom_mpi_settings', None) + self.horovod_settings = kwargs.get('horovod_settings', None) self.job_preparation = kwargs.get('job_preparation', None) self.std_out_err_path_prefix = kwargs.get('std_out_err_path_prefix', None) self.input_directories = kwargs.get('input_directories', None) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_create_parameters_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_create_parameters_py3.py index bd2e5d19b5a1..1d90aa79445d 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_create_parameters_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_create_parameters_py3.py @@ -17,17 +17,10 @@ class 
JobCreateParameters(Model): All required parameters must be populated in order to send to Azure. - :param location: Required. The region in which to create the job. - :type location: str - :param tags: The user specified tags associated with the job. - :type tags: dict[str, str] - :param experiment_name: Describe the experiment information of the job - :type experiment_name: str - :param priority: Priority associated with the job. Priority associated - with the job. Priority values can range from -1000 to 1000, with -1000 - being the lowest priority and 1000 being the highest priority. The default - value is 0. Default value: 0 . - :type priority: int + :param scheduling_priority: Scheduling priority associated with the job. + Scheduling priority associated with the job. Possible values include: + 'low', 'normal', 'high'. Default value: "normal" . + :type scheduling_priority: str or ~azure.mgmt.batchai.models.JobPriority :param cluster: Required. Specifies the Id of the cluster on which this job will run. :type cluster: ~azure.mgmt.batchai.models.ResourceId @@ -61,6 +54,10 @@ class JobCreateParameters(Model): job. :type custom_toolkit_settings: ~azure.mgmt.batchai.models.CustomToolkitSettings + :param custom_mpi_settings: Specifies the settings for custom MPI job. + :type custom_mpi_settings: ~azure.mgmt.batchai.models.CustomMpiSettings + :param horovod_settings: Specifies the settings for Horovod job. + :type horovod_settings: ~azure.mgmt.batchai.models.HorovodSettings :param job_preparation: Specifies the command line to be executed before tool kit is launched. 
The specified actions will run on all the nodes that are part of the job @@ -88,17 +85,13 @@ class JobCreateParameters(Model): """ _validation = { - 'location': {'required': True}, 'cluster': {'required': True}, 'node_count': {'required': True}, 'std_out_err_path_prefix': {'required': True}, } _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'experiment_name': {'key': 'properties.experimentName', 'type': 'str'}, - 'priority': {'key': 'properties.priority', 'type': 'int'}, + 'scheduling_priority': {'key': 'properties.schedulingPriority', 'type': 'str'}, 'cluster': {'key': 'properties.cluster', 'type': 'ResourceId'}, 'mount_volumes': {'key': 'properties.mountVolumes', 'type': 'MountVolumes'}, 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, @@ -110,6 +103,8 @@ class JobCreateParameters(Model): 'caffe2_settings': {'key': 'properties.caffe2Settings', 'type': 'Caffe2Settings'}, 'chainer_settings': {'key': 'properties.chainerSettings', 'type': 'ChainerSettings'}, 'custom_toolkit_settings': {'key': 'properties.customToolkitSettings', 'type': 'CustomToolkitSettings'}, + 'custom_mpi_settings': {'key': 'properties.customMpiSettings', 'type': 'CustomMpiSettings'}, + 'horovod_settings': {'key': 'properties.horovodSettings', 'type': 'HorovodSettings'}, 'job_preparation': {'key': 'properties.jobPreparation', 'type': 'JobPreparation'}, 'std_out_err_path_prefix': {'key': 'properties.stdOutErrPathPrefix', 'type': 'str'}, 'input_directories': {'key': 'properties.inputDirectories', 'type': '[InputDirectory]'}, @@ -119,12 +114,9 @@ class JobCreateParameters(Model): 'constraints': {'key': 'properties.constraints', 'type': 'JobBasePropertiesConstraints'}, } - def __init__(self, *, location: str, cluster, node_count: int, std_out_err_path_prefix: str, tags=None, experiment_name: str=None, priority: int=0, mount_volumes=None, container_settings=None, cntk_settings=None, py_torch_settings=None, 
tensor_flow_settings=None, caffe_settings=None, caffe2_settings=None, chainer_settings=None, custom_toolkit_settings=None, job_preparation=None, input_directories=None, output_directories=None, environment_variables=None, secrets=None, constraints=None, **kwargs) -> None: + def __init__(self, *, cluster, node_count: int, std_out_err_path_prefix: str, scheduling_priority="normal", mount_volumes=None, container_settings=None, cntk_settings=None, py_torch_settings=None, tensor_flow_settings=None, caffe_settings=None, caffe2_settings=None, chainer_settings=None, custom_toolkit_settings=None, custom_mpi_settings=None, horovod_settings=None, job_preparation=None, input_directories=None, output_directories=None, environment_variables=None, secrets=None, constraints=None, **kwargs) -> None: super(JobCreateParameters, self).__init__(**kwargs) - self.location = location - self.tags = tags - self.experiment_name = experiment_name - self.priority = priority + self.scheduling_priority = scheduling_priority self.cluster = cluster self.mount_volumes = mount_volumes self.node_count = node_count @@ -136,6 +128,8 @@ def __init__(self, *, location: str, cluster, node_count: int, std_out_err_path_ self.caffe2_settings = caffe2_settings self.chainer_settings = chainer_settings self.custom_toolkit_settings = custom_toolkit_settings + self.custom_mpi_settings = custom_mpi_settings + self.horovod_settings = horovod_settings self.job_preparation = job_preparation self.std_out_err_path_prefix = std_out_err_path_prefix self.input_directories = input_directories diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_properties_execution_info.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_properties_execution_info.py index 26a69dec8c3c..87e6c51d7b3e 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_properties_execution_info.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_properties_execution_info.py @@ -16,27 +16,31 @@ class 
JobPropertiesExecutionInfo(Model): """Contains information about the execution of a job in the Azure Batch service. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when + sending a request. - :param start_time: Required. The time at which the job started running. - 'Running' corresponds to the running state. If the job has been restarted - or retried, this is the most recent time at which the job started running. + :ivar start_time: The time at which the job started running. 'Running' + corresponds to the running state. If the job has been restarted or + retried, this is the most recent time at which the job started running. This property is present only for job that are in the running or completed state. - :type start_time: datetime - :param end_time: The time at which the job completed. This property is - only returned if the job is in completed state. - :type end_time: datetime - :param exit_code: The exit code of the job. This property is only returned + :vartype start_time: datetime + :ivar end_time: The time at which the job completed. This property is only + returned if the job is in completed state. + :vartype end_time: datetime + :ivar exit_code: The exit code of the job. This property is only returned if the job is in completed state. - :type exit_code: int - :param errors: Contains details of various errors encountered by the + :vartype exit_code: int + :ivar errors: Contains details of various errors encountered by the service during job execution. 
- :type errors: list[~azure.mgmt.batchai.models.BatchAIError] + :vartype errors: list[~azure.mgmt.batchai.models.BatchAIError] """ _validation = { - 'start_time': {'required': True}, + 'start_time': {'readonly': True}, + 'end_time': {'readonly': True}, + 'exit_code': {'readonly': True}, + 'errors': {'readonly': True}, } _attribute_map = { @@ -48,7 +52,7 @@ class JobPropertiesExecutionInfo(Model): def __init__(self, **kwargs): super(JobPropertiesExecutionInfo, self).__init__(**kwargs) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.exit_code = kwargs.get('exit_code', None) - self.errors = kwargs.get('errors', None) + self.start_time = None + self.end_time = None + self.exit_code = None + self.errors = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_properties_execution_info_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_properties_execution_info_py3.py index 4f2299253c71..b11d1316f534 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_properties_execution_info_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_properties_execution_info_py3.py @@ -16,27 +16,31 @@ class JobPropertiesExecutionInfo(Model): """Contains information about the execution of a job in the Azure Batch service. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when + sending a request. - :param start_time: Required. The time at which the job started running. - 'Running' corresponds to the running state. If the job has been restarted - or retried, this is the most recent time at which the job started running. + :ivar start_time: The time at which the job started running. 'Running' + corresponds to the running state. If the job has been restarted or + retried, this is the most recent time at which the job started running. This property is present only for job that are in the running or completed state. 
- :type start_time: datetime - :param end_time: The time at which the job completed. This property is - only returned if the job is in completed state. - :type end_time: datetime - :param exit_code: The exit code of the job. This property is only returned + :vartype start_time: datetime + :ivar end_time: The time at which the job completed. This property is only + returned if the job is in completed state. + :vartype end_time: datetime + :ivar exit_code: The exit code of the job. This property is only returned if the job is in completed state. - :type exit_code: int - :param errors: Contains details of various errors encountered by the + :vartype exit_code: int + :ivar errors: Contains details of various errors encountered by the service during job execution. - :type errors: list[~azure.mgmt.batchai.models.BatchAIError] + :vartype errors: list[~azure.mgmt.batchai.models.BatchAIError] """ _validation = { - 'start_time': {'required': True}, + 'start_time': {'readonly': True}, + 'end_time': {'readonly': True}, + 'exit_code': {'readonly': True}, + 'errors': {'readonly': True}, } _attribute_map = { @@ -46,9 +50,9 @@ class JobPropertiesExecutionInfo(Model): 'errors': {'key': 'errors', 'type': '[BatchAIError]'}, } - def __init__(self, *, start_time, end_time=None, exit_code: int=None, errors=None, **kwargs) -> None: + def __init__(self, **kwargs) -> None: super(JobPropertiesExecutionInfo, self).__init__(**kwargs) - self.start_time = start_time - self.end_time = end_time - self.exit_code = exit_code - self.errors = errors + self.start_time = None + self.end_time = None + self.exit_code = None + self.errors = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_py3.py index b6ffa318f5ef..6491856115c5 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/job_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/job_py3.py @@ -9,32 +9,25 @@ # regenerated. 
# -------------------------------------------------------------------------- -from .resource import Resource +from .proxy_resource import ProxyResource -class Job(Resource): - """Contains information about the job. +class Job(ProxyResource): + """Contains information about a Job. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The ID of the resource + :ivar id: The ID of the resource. :vartype id: str - :ivar name: The name of the resource + :ivar name: The name of the resource. :vartype name: str - :ivar type: The type of the resource + :ivar type: The type of the resource. :vartype type: str - :ivar location: The location of the resource - :vartype location: str - :ivar tags: The tags of the resource - :vartype tags: dict[str, str] - :param experiment_name: Describe the experiment information of the job - :type experiment_name: str :param priority: Priority associated with the job. Priority associated - with the job. Priority values can range from -1000 to 1000, with -1000 - being the lowest priority and 1000 being the highest priority. The default - value is 0. Default value: 0 . - :type priority: int + with the job. Possible values include: 'low', 'normal', 'high'. Default + value: "normal" . + :type priority: str or ~azure.mgmt.batchai.models.JobPriority :param cluster: Specifies the Id of the cluster on which this job will run. :type cluster: ~azure.mgmt.batchai.models.ResourceId @@ -43,13 +36,13 @@ class Job(Resource): unmouted after the job completion. The volumes will be mounted at location specified by $AZ_BATCHAI_JOB_MOUNT_ROOT environment variable. :type mount_volumes: ~azure.mgmt.batchai.models.MountVolumes - :param job_output_directory_path_segment: A segment of job's output + :ivar job_output_directory_path_segment: A segment of job's output directories path created by BatchAI. Batch AI creates job's output directories under an unique path to avoid conflicts between jobs. 
This value contains a path segment generated by Batch AI to make the path unique and can be used to find the output directory on the node or mounted filesystem. - :type job_output_directory_path_segment: str + :vartype job_output_directory_path_segment: str :param node_count: Number of compute nodes to run the job on. The job will be gang scheduled on that many compute nodes :type node_count: int @@ -59,8 +52,9 @@ class Job(Resource): on the VM. :type container_settings: ~azure.mgmt.batchai.models.ContainerSettings :param tool_type: The toolkit type of this job. Possible values are: cntk, - tensorflow, caffe, caffe2, chainer, pytorch, custom. Possible values - include: 'cntk', 'tensorflow', 'caffe', 'caffe2', 'chainer', 'custom' + tensorflow, caffe, caffe2, chainer, pytorch, custom, mpi, horovod. + Possible values include: 'cntk', 'tensorflow', 'caffe', 'caffe2', + 'chainer', 'horovod', 'mpi', 'custom' :type tool_type: str or ~azure.mgmt.batchai.models.ToolType :param cntk_settings: Specifies the settings for CNTK (aka Microsoft Cognitive Toolkit) job. @@ -77,6 +71,10 @@ class Job(Resource): job. :type custom_toolkit_settings: ~azure.mgmt.batchai.models.CustomToolkitSettings + :param custom_mpi_settings: Specifies the settings for custom MPI job. + :type custom_mpi_settings: ~azure.mgmt.batchai.models.CustomMpiSettings + :param horovod_settings: Specifies the settings for Horovod job. + :type horovod_settings: ~azure.mgmt.batchai.models.HorovodSettings :param job_preparation: Specifies the actions to be performed before tool kit is launched. The specified actions will run on all the nodes that are part of the job @@ -112,7 +110,7 @@ class Job(Resource): entered its current provisioning state. The time at which the job entered its current provisioning state. :vartype provisioning_state_transition_time: datetime - :param execution_state: The current state of the job. The current state of + :ivar execution_state: The current state of the job. 
The current state of the job. Possible values are: queued - The job is queued and able to run. A job enters this state when it is created, or when it is awaiting a retry after a failed run. running - The job is running on a compute cluster. @@ -125,7 +123,7 @@ class Job(Resource): code) and has exhausted its retry limit. A job is also marked as failed if an error occurred launching the job. Possible values include: 'queued', 'running', 'terminating', 'succeeded', 'failed' - :type execution_state: str or ~azure.mgmt.batchai.models.ExecutionState + :vartype execution_state: str or ~azure.mgmt.batchai.models.ExecutionState :ivar execution_state_transition_time: The time at which the job entered its current execution state. The time at which the job entered its current execution state. @@ -140,11 +138,11 @@ class Job(Resource): 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'location': {'readonly': True}, - 'tags': {'readonly': True}, + 'job_output_directory_path_segment': {'readonly': True}, 'creation_time': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'provisioning_state_transition_time': {'readonly': True}, + 'execution_state': {'readonly': True}, 'execution_state_transition_time': {'readonly': True}, } @@ -152,10 +150,7 @@ class Job(Resource): 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'experiment_name': {'key': 'properties.experimentName', 'type': 'str'}, - 'priority': {'key': 'properties.priority', 'type': 'int'}, + 'priority': {'key': 'properties.priority', 'type': 'str'}, 'cluster': {'key': 'properties.cluster', 'type': 'ResourceId'}, 'mount_volumes': {'key': 'properties.mountVolumes', 'type': 'MountVolumes'}, 'job_output_directory_path_segment': {'key': 'properties.jobOutputDirectoryPathSegment', 'type': 'str'}, @@ -168,6 +163,8 @@ class 
Job(Resource): 'caffe_settings': {'key': 'properties.caffeSettings', 'type': 'CaffeSettings'}, 'chainer_settings': {'key': 'properties.chainerSettings', 'type': 'ChainerSettings'}, 'custom_toolkit_settings': {'key': 'properties.customToolkitSettings', 'type': 'CustomToolkitSettings'}, + 'custom_mpi_settings': {'key': 'properties.customMpiSettings', 'type': 'CustomMpiSettings'}, + 'horovod_settings': {'key': 'properties.horovodSettings', 'type': 'HorovodSettings'}, 'job_preparation': {'key': 'properties.jobPreparation', 'type': 'JobPreparation'}, 'std_out_err_path_prefix': {'key': 'properties.stdOutErrPathPrefix', 'type': 'str'}, 'input_directories': {'key': 'properties.inputDirectories', 'type': '[InputDirectory]'}, @@ -176,20 +173,19 @@ class Job(Resource): 'secrets': {'key': 'properties.secrets', 'type': '[EnvironmentVariableWithSecretValue]'}, 'constraints': {'key': 'properties.constraints', 'type': 'JobPropertiesConstraints'}, 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, - 'execution_state': {'key': 'properties.executionState', 'type': 'ExecutionState'}, + 'execution_state': {'key': 'properties.executionState', 'type': 'str'}, 'execution_state_transition_time': {'key': 'properties.executionStateTransitionTime', 'type': 'iso-8601'}, 'execution_info': {'key': 'properties.executionInfo', 'type': 'JobPropertiesExecutionInfo'}, } - def __init__(self, *, experiment_name: str=None, priority: int=0, cluster=None, mount_volumes=None, job_output_directory_path_segment: str=None, node_count: int=None, container_settings=None, tool_type=None, cntk_settings=None, py_torch_settings=None, tensor_flow_settings=None, caffe_settings=None, chainer_settings=None, 
custom_toolkit_settings=None, job_preparation=None, std_out_err_path_prefix: str=None, input_directories=None, output_directories=None, environment_variables=None, secrets=None, constraints=None, execution_state=None, execution_info=None, **kwargs) -> None: + def __init__(self, *, priority="normal", cluster=None, mount_volumes=None, node_count: int=None, container_settings=None, tool_type=None, cntk_settings=None, py_torch_settings=None, tensor_flow_settings=None, caffe_settings=None, chainer_settings=None, custom_toolkit_settings=None, custom_mpi_settings=None, horovod_settings=None, job_preparation=None, std_out_err_path_prefix: str=None, input_directories=None, output_directories=None, environment_variables=None, secrets=None, constraints=None, execution_info=None, **kwargs) -> None: super(Job, self).__init__(**kwargs) - self.experiment_name = experiment_name self.priority = priority self.cluster = cluster self.mount_volumes = mount_volumes - self.job_output_directory_path_segment = job_output_directory_path_segment + self.job_output_directory_path_segment = None self.node_count = node_count self.container_settings = container_settings self.tool_type = tool_type @@ -199,6 +195,8 @@ def __init__(self, *, experiment_name: str=None, priority: int=0, cluster=None, self.caffe_settings = caffe_settings self.chainer_settings = chainer_settings self.custom_toolkit_settings = custom_toolkit_settings + self.custom_mpi_settings = custom_mpi_settings + self.horovod_settings = horovod_settings self.job_preparation = job_preparation self.std_out_err_path_prefix = std_out_err_path_prefix self.input_directories = input_directories @@ -209,6 +207,6 @@ def __init__(self, *, experiment_name: str=None, priority: int=0, cluster=None, self.creation_time = None self.provisioning_state = None self.provisioning_state_transition_time = None - self.execution_state = execution_state + self.execution_state = None self.execution_state_transition_time = None self.execution_info = 
execution_info diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_experiment_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_experiment_options.py new file mode 100644 index 000000000000..42b33b199219 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_experiment_options.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobsListByExperimentOptions(Model): + """Additional parameters for list_by_experiment operation. + + :param max_results: The maximum number of items to return in the response. + A maximum of 1000 files can be returned. Default value: 1000 . + :type max_results: int + """ + + _attribute_map = { + 'max_results': {'key': '', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(JobsListByExperimentOptions, self).__init__(**kwargs) + self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_experiment_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_experiment_options_py3.py new file mode 100644 index 000000000000..f34e5483254f --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_experiment_options_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobsListByExperimentOptions(Model): + """Additional parameters for list_by_experiment operation. + + :param max_results: The maximum number of items to return in the response. + A maximum of 1000 files can be returned. Default value: 1000 . + :type max_results: int + """ + + _attribute_map = { + 'max_results': {'key': '', 'type': 'int'}, + } + + def __init__(self, *, max_results: int=1000, **kwargs) -> None: + super(JobsListByExperimentOptions, self).__init__(**kwargs) + self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/key_vault_key_reference.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/key_vault_key_reference.py deleted file mode 100644 index 65410770db47..000000000000 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/key_vault_key_reference.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class KeyVaultKeyReference(Model): - """Describes a reference to Key Vault Key. - - All required parameters must be populated in order to send to Azure. - - :param source_vault: Required. Fully qualified resource Id for the Key - Vault. 
- :type source_vault: ~azure.mgmt.batchai.models.ResourceId - :param key_url: Required. The URL referencing a key in a Key Vault. - :type key_url: str - """ - - _validation = { - 'source_vault': {'required': True}, - 'key_url': {'required': True}, - } - - _attribute_map = { - 'source_vault': {'key': 'sourceVault', 'type': 'ResourceId'}, - 'key_url': {'key': 'keyUrl', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(KeyVaultKeyReference, self).__init__(**kwargs) - self.source_vault = kwargs.get('source_vault', None) - self.key_url = kwargs.get('key_url', None) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/key_vault_key_reference_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/key_vault_key_reference_py3.py deleted file mode 100644 index 73c4f73c1067..000000000000 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/key_vault_key_reference_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class KeyVaultKeyReference(Model): - """Describes a reference to Key Vault Key. - - All required parameters must be populated in order to send to Azure. - - :param source_vault: Required. Fully qualified resource Id for the Key - Vault. - :type source_vault: ~azure.mgmt.batchai.models.ResourceId - :param key_url: Required. The URL referencing a key in a Key Vault. 
- :type key_url: str - """ - - _validation = { - 'source_vault': {'required': True}, - 'key_url': {'required': True}, - } - - _attribute_map = { - 'source_vault': {'key': 'sourceVault', 'type': 'ResourceId'}, - 'key_url': {'key': 'keyUrl', 'type': 'str'}, - } - - def __init__(self, *, source_vault, key_url: str, **kwargs) -> None: - super(KeyVaultKeyReference, self).__init__(**kwargs) - self.source_vault = source_vault - self.key_url = key_url diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/local_data_volume.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/local_data_volume.py deleted file mode 100644 index ce2dbd9b6c04..000000000000 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/local_data_volume.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LocalDataVolume(Model): - """Represents mapping of host directories to directories in the container. - - All required parameters must be populated in order to send to Azure. - - :param host_path: Required. The path on the host that is to be mounted as - a directory in the container. - :type host_path: str - :param local_path: Required. The container local path where the host - directory is mounted. 
- :type local_path: str - """ - - _validation = { - 'host_path': {'required': True}, - 'local_path': {'required': True}, - } - - _attribute_map = { - 'host_path': {'key': 'hostPath', 'type': 'str'}, - 'local_path': {'key': 'localPath', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(LocalDataVolume, self).__init__(**kwargs) - self.host_path = kwargs.get('host_path', None) - self.local_path = kwargs.get('local_path', None) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/local_data_volume_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/local_data_volume_py3.py deleted file mode 100644 index ab954dd286e1..000000000000 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/local_data_volume_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LocalDataVolume(Model): - """Represents mapping of host directories to directories in the container. - - All required parameters must be populated in order to send to Azure. - - :param host_path: Required. The path on the host that is to be mounted as - a directory in the container. - :type host_path: str - :param local_path: Required. The container local path where the host - directory is mounted. 
- :type local_path: str - """ - - _validation = { - 'host_path': {'required': True}, - 'local_path': {'required': True}, - } - - _attribute_map = { - 'host_path': {'key': 'hostPath', 'type': 'str'}, - 'local_path': {'key': 'localPath', 'type': 'str'}, - } - - def __init__(self, *, host_path: str, local_path: str, **kwargs) -> None: - super(LocalDataVolume, self).__init__(**kwargs) - self.host_path = host_path - self.local_path = local_path diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/manual_scale_settings.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/manual_scale_settings.py index 77f749b732c4..c5323a0e37a9 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/manual_scale_settings.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/manual_scale_settings.py @@ -24,7 +24,7 @@ class ManualScaleSettings(Model): :param node_deallocation_option: Determines what to do with the job(s) running on compute node if the Cluster size is decreasing. The default value is requeue. Possible values include: 'requeue', 'terminate', - 'waitforjobcompletion', 'unknown'. Default value: "requeue" . + 'waitforjobcompletion'. Default value: "requeue" . 
:type node_deallocation_option: str or ~azure.mgmt.batchai.models.DeallocationOption """ @@ -35,7 +35,7 @@ class ManualScaleSettings(Model): _attribute_map = { 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, - 'node_deallocation_option': {'key': 'nodeDeallocationOption', 'type': 'DeallocationOption'}, + 'node_deallocation_option': {'key': 'nodeDeallocationOption', 'type': 'str'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/manual_scale_settings_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/manual_scale_settings_py3.py index 1920de3c57a7..c5d0475eadde 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/manual_scale_settings_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/manual_scale_settings_py3.py @@ -24,7 +24,7 @@ class ManualScaleSettings(Model): :param node_deallocation_option: Determines what to do with the job(s) running on compute node if the Cluster size is decreasing. The default value is requeue. Possible values include: 'requeue', 'terminate', - 'waitforjobcompletion', 'unknown'. Default value: "requeue" . + 'waitforjobcompletion'. Default value: "requeue" . 
:type node_deallocation_option: str or ~azure.mgmt.batchai.models.DeallocationOption """ @@ -35,7 +35,7 @@ class ManualScaleSettings(Model): _attribute_map = { 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, - 'node_deallocation_option': {'key': 'nodeDeallocationOption', 'type': 'DeallocationOption'}, + 'node_deallocation_option': {'key': 'nodeDeallocationOption', 'type': 'str'}, } def __init__(self, *, target_node_count: int=0, node_deallocation_option="requeue", **kwargs) -> None: diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/mount_settings.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/mount_settings.py index bdb4f004c9eb..aa645ca963e1 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/mount_settings.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/mount_settings.py @@ -22,16 +22,12 @@ class MountSettings(Model): :param file_server_internal_ip: Internal subnet IP which can be used to access the file Server from within the subnet. :type file_server_internal_ip: str - :param file_server_type: Type of the fileserver e.g. nfs, glusterfs etc. 
- Possible values include: 'nfs', 'glusterfs' - :type file_server_type: str or ~azure.mgmt.batchai.models.FileServerType """ _attribute_map = { 'mount_point': {'key': 'mountPoint', 'type': 'str'}, 'file_server_public_ip': {'key': 'fileServerPublicIP', 'type': 'str'}, 'file_server_internal_ip': {'key': 'fileServerInternalIP', 'type': 'str'}, - 'file_server_type': {'key': 'fileServerType', 'type': 'str'}, } def __init__(self, **kwargs): @@ -39,4 +35,3 @@ def __init__(self, **kwargs): self.mount_point = kwargs.get('mount_point', None) self.file_server_public_ip = kwargs.get('file_server_public_ip', None) self.file_server_internal_ip = kwargs.get('file_server_internal_ip', None) - self.file_server_type = kwargs.get('file_server_type', None) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/mount_settings_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/mount_settings_py3.py index 434a158332d1..0829aa2d51da 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/mount_settings_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/mount_settings_py3.py @@ -22,21 +22,16 @@ class MountSettings(Model): :param file_server_internal_ip: Internal subnet IP which can be used to access the file Server from within the subnet. :type file_server_internal_ip: str - :param file_server_type: Type of the fileserver e.g. nfs, glusterfs etc. 
- Possible values include: 'nfs', 'glusterfs' - :type file_server_type: str or ~azure.mgmt.batchai.models.FileServerType """ _attribute_map = { 'mount_point': {'key': 'mountPoint', 'type': 'str'}, 'file_server_public_ip': {'key': 'fileServerPublicIP', 'type': 'str'}, 'file_server_internal_ip': {'key': 'fileServerInternalIP', 'type': 'str'}, - 'file_server_type': {'key': 'fileServerType', 'type': 'str'}, } - def __init__(self, *, mount_point: str=None, file_server_public_ip: str=None, file_server_internal_ip: str=None, file_server_type=None, **kwargs) -> None: + def __init__(self, *, mount_point: str=None, file_server_public_ip: str=None, file_server_internal_ip: str=None, **kwargs) -> None: super(MountSettings, self).__init__(**kwargs) self.mount_point = mount_point self.file_server_public_ip = file_server_public_ip self.file_server_internal_ip = file_server_internal_ip - self.file_server_type = file_server_type diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/node_state_counts.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/node_state_counts.py index 015cc1a96ba1..6e9b199c5deb 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/node_state_counts.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/node_state_counts.py @@ -15,30 +15,29 @@ class NodeStateCounts(Model): """Counts of various compute node states on the cluster. - All required parameters must be populated in order to send to Azure. - - :param idle_node_count: Required. Number of compute nodes in idle state. - :type idle_node_count: int - :param running_node_count: Required. Number of compute nodes which are - running jobs. - :type running_node_count: int - :param preparing_node_count: Required. Number of compute nodes which are - being prepared. - :type preparing_node_count: int - :param unusable_node_count: Required. Number of compute nodes which are - unusable. - :type unusable_node_count: int - :param leaving_node_count: Required. 
Number of compute nodes which are - leaving the cluster. - :type leaving_node_count: int + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar idle_node_count: Number of compute nodes in idle state. + :vartype idle_node_count: int + :ivar running_node_count: Number of compute nodes which are running jobs. + :vartype running_node_count: int + :ivar preparing_node_count: Number of compute nodes which are being + prepared. + :vartype preparing_node_count: int + :ivar unusable_node_count: Number of compute nodes which are unusable. + :vartype unusable_node_count: int + :ivar leaving_node_count: Number of compute nodes which are leaving the + cluster. + :vartype leaving_node_count: int """ _validation = { - 'idle_node_count': {'required': True}, - 'running_node_count': {'required': True}, - 'preparing_node_count': {'required': True}, - 'unusable_node_count': {'required': True}, - 'leaving_node_count': {'required': True}, + 'idle_node_count': {'readonly': True}, + 'running_node_count': {'readonly': True}, + 'preparing_node_count': {'readonly': True}, + 'unusable_node_count': {'readonly': True}, + 'leaving_node_count': {'readonly': True}, } _attribute_map = { @@ -51,8 +50,8 @@ class NodeStateCounts(Model): def __init__(self, **kwargs): super(NodeStateCounts, self).__init__(**kwargs) - self.idle_node_count = kwargs.get('idle_node_count', None) - self.running_node_count = kwargs.get('running_node_count', None) - self.preparing_node_count = kwargs.get('preparing_node_count', None) - self.unusable_node_count = kwargs.get('unusable_node_count', None) - self.leaving_node_count = kwargs.get('leaving_node_count', None) + self.idle_node_count = None + self.running_node_count = None + self.preparing_node_count = None + self.unusable_node_count = None + self.leaving_node_count = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/node_state_counts_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/node_state_counts_py3.py 
index beec9a6692d5..8618a9195cda 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/node_state_counts_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/node_state_counts_py3.py @@ -15,30 +15,29 @@ class NodeStateCounts(Model): """Counts of various compute node states on the cluster. - All required parameters must be populated in order to send to Azure. - - :param idle_node_count: Required. Number of compute nodes in idle state. - :type idle_node_count: int - :param running_node_count: Required. Number of compute nodes which are - running jobs. - :type running_node_count: int - :param preparing_node_count: Required. Number of compute nodes which are - being prepared. - :type preparing_node_count: int - :param unusable_node_count: Required. Number of compute nodes which are - unusable. - :type unusable_node_count: int - :param leaving_node_count: Required. Number of compute nodes which are - leaving the cluster. - :type leaving_node_count: int + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar idle_node_count: Number of compute nodes in idle state. + :vartype idle_node_count: int + :ivar running_node_count: Number of compute nodes which are running jobs. + :vartype running_node_count: int + :ivar preparing_node_count: Number of compute nodes which are being + prepared. + :vartype preparing_node_count: int + :ivar unusable_node_count: Number of compute nodes which are unusable. + :vartype unusable_node_count: int + :ivar leaving_node_count: Number of compute nodes which are leaving the + cluster. 
+ :vartype leaving_node_count: int """ _validation = { - 'idle_node_count': {'required': True}, - 'running_node_count': {'required': True}, - 'preparing_node_count': {'required': True}, - 'unusable_node_count': {'required': True}, - 'leaving_node_count': {'required': True}, + 'idle_node_count': {'readonly': True}, + 'running_node_count': {'readonly': True}, + 'preparing_node_count': {'readonly': True}, + 'unusable_node_count': {'readonly': True}, + 'leaving_node_count': {'readonly': True}, } _attribute_map = { @@ -49,10 +48,10 @@ class NodeStateCounts(Model): 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, } - def __init__(self, *, idle_node_count: int, running_node_count: int, preparing_node_count: int, unusable_node_count: int, leaving_node_count: int, **kwargs) -> None: + def __init__(self, **kwargs) -> None: super(NodeStateCounts, self).__init__(**kwargs) - self.idle_node_count = idle_node_count - self.running_node_count = running_node_count - self.preparing_node_count = preparing_node_count - self.unusable_node_count = unusable_node_count - self.leaving_node_count = leaving_node_count + self.idle_node_count = None + self.running_node_count = None + self.preparing_node_count = None + self.unusable_node_count = None + self.leaving_node_count = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/operation.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/operation.py index c297919824f1..aed48b3beb11 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/operation.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/operation.py @@ -17,17 +17,25 @@ class Operation(Model): Details of a REST API operation. - :param name: The operation name. This is of the format + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: The operation name. 
This is of the format {provider}/{resource}/{operation} - :type name: str + :vartype name: str :param display: The object that describes the operation. :type display: ~azure.mgmt.batchai.models.OperationDisplay - :param origin: The intended executor of the operation. - :type origin: str + :ivar origin: The intended executor of the operation. + :vartype origin: str :param properties: Properties of the operation. :type properties: object """ + _validation = { + 'name': {'readonly': True}, + 'origin': {'readonly': True}, + } + _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, @@ -37,7 +45,7 @@ class Operation(Model): def __init__(self, **kwargs): super(Operation, self).__init__(**kwargs) - self.name = kwargs.get('name', None) + self.name = None self.display = kwargs.get('display', None) - self.origin = kwargs.get('origin', None) + self.origin = None self.properties = kwargs.get('properties', None) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_display.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_display.py index 99487e3cb825..37f8ff623cfb 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_display.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_display.py @@ -15,17 +15,27 @@ class OperationDisplay(Model): """The object that describes the operation. - :param provider: Friendly name of the resource provider. - :type provider: str - :param operation: The operation type. For example: read, write, delete, or + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar provider: Friendly name of the resource provider. + :vartype provider: str + :ivar operation: The operation type. For example: read, write, delete, or listKeys/action - :type operation: str - :param resource: The resource type on which the operation is performed. 
- :type resource: str - :param description: The friendly name of the operation. - :type description: str + :vartype operation: str + :ivar resource: The resource type on which the operation is performed. + :vartype resource: str + :ivar description: The friendly name of the operation. + :vartype description: str """ + _validation = { + 'provider': {'readonly': True}, + 'operation': {'readonly': True}, + 'resource': {'readonly': True}, + 'description': {'readonly': True}, + } + _attribute_map = { 'provider': {'key': 'provider', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, @@ -35,7 +45,7 @@ class OperationDisplay(Model): def __init__(self, **kwargs): super(OperationDisplay, self).__init__(**kwargs) - self.provider = kwargs.get('provider', None) - self.operation = kwargs.get('operation', None) - self.resource = kwargs.get('resource', None) - self.description = kwargs.get('description', None) + self.provider = None + self.operation = None + self.resource = None + self.description = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_display_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_display_py3.py index ce54ece35d47..2b1860159812 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_display_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_display_py3.py @@ -15,17 +15,27 @@ class OperationDisplay(Model): """The object that describes the operation. - :param provider: Friendly name of the resource provider. - :type provider: str - :param operation: The operation type. For example: read, write, delete, or + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar provider: Friendly name of the resource provider. + :vartype provider: str + :ivar operation: The operation type. For example: read, write, delete, or listKeys/action - :type operation: str - :param resource: The resource type on which the operation is performed. 
- :type resource: str - :param description: The friendly name of the operation. - :type description: str + :vartype operation: str + :ivar resource: The resource type on which the operation is performed. + :vartype resource: str + :ivar description: The friendly name of the operation. + :vartype description: str """ + _validation = { + 'provider': {'readonly': True}, + 'operation': {'readonly': True}, + 'resource': {'readonly': True}, + 'description': {'readonly': True}, + } + _attribute_map = { 'provider': {'key': 'provider', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, @@ -33,9 +43,9 @@ class OperationDisplay(Model): 'description': {'key': 'description', 'type': 'str'}, } - def __init__(self, *, provider: str=None, operation: str=None, resource: str=None, description: str=None, **kwargs) -> None: + def __init__(self, **kwargs) -> None: super(OperationDisplay, self).__init__(**kwargs) - self.provider = provider - self.operation = operation - self.resource = resource - self.description = description + self.provider = None + self.operation = None + self.resource = None + self.description = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_py3.py index 55c875691feb..5ed40c6d7d51 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/operation_py3.py @@ -17,17 +17,25 @@ class Operation(Model): Details of a REST API operation. - :param name: The operation name. This is of the format + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: The operation name. This is of the format {provider}/{resource}/{operation} - :type name: str + :vartype name: str :param display: The object that describes the operation. :type display: ~azure.mgmt.batchai.models.OperationDisplay - :param origin: The intended executor of the operation. 
- :type origin: str + :ivar origin: The intended executor of the operation. + :vartype origin: str :param properties: Properties of the operation. :type properties: object """ + _validation = { + 'name': {'readonly': True}, + 'origin': {'readonly': True}, + } + _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, @@ -35,9 +43,9 @@ class Operation(Model): 'properties': {'key': 'properties', 'type': 'object'}, } - def __init__(self, *, name: str=None, display=None, origin: str=None, properties=None, **kwargs) -> None: + def __init__(self, *, display=None, properties=None, **kwargs) -> None: super(Operation, self).__init__(**kwargs) - self.name = name + self.name = None self.display = display - self.origin = origin + self.origin = None self.properties = properties diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/output_directory.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/output_directory.py index e09a503280d6..15f7b35aa30a 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/output_directory.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/output_directory.py @@ -33,16 +33,6 @@ class OutputDirectory(Model): pathPrefix, jobOutputDirectoryPathSegment (reported by get job) and pathSuffix. :type path_suffix: str - :param type: An enumeration, which specifies the type of job output - directory. Default value is Custom. The possible values are Model, Logs, - Summary, and Custom. Users can use multiple enums for a single directory. - Eg. outPutType='Model,Logs, Summary'. Possible values include: 'model', - 'logs', 'summary', 'custom'. Default value: "custom" . - :type type: str or ~azure.mgmt.batchai.models.OutputType - :param create_new: True to create new directory. Default is true. If - false, then the directory is not created and can be any directory path - that the user specifies. Default value: True . 
- :type create_new: bool """ _validation = { @@ -54,8 +44,6 @@ class OutputDirectory(Model): 'id': {'key': 'id', 'type': 'str'}, 'path_prefix': {'key': 'pathPrefix', 'type': 'str'}, 'path_suffix': {'key': 'pathSuffix', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_new': {'key': 'createNew', 'type': 'bool'}, } def __init__(self, **kwargs): @@ -63,5 +51,3 @@ def __init__(self, **kwargs): self.id = kwargs.get('id', None) self.path_prefix = kwargs.get('path_prefix', None) self.path_suffix = kwargs.get('path_suffix', None) - self.type = kwargs.get('type', "custom") - self.create_new = kwargs.get('create_new', True) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/output_directory_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/output_directory_py3.py index f5327d45d4a4..78d37348e91a 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/output_directory_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/output_directory_py3.py @@ -33,16 +33,6 @@ class OutputDirectory(Model): pathPrefix, jobOutputDirectoryPathSegment (reported by get job) and pathSuffix. :type path_suffix: str - :param type: An enumeration, which specifies the type of job output - directory. Default value is Custom. The possible values are Model, Logs, - Summary, and Custom. Users can use multiple enums for a single directory. - Eg. outPutType='Model,Logs, Summary'. Possible values include: 'model', - 'logs', 'summary', 'custom'. Default value: "custom" . - :type type: str or ~azure.mgmt.batchai.models.OutputType - :param create_new: True to create new directory. Default is true. If - false, then the directory is not created and can be any directory path - that the user specifies. Default value: True . 
- :type create_new: bool """ _validation = { @@ -54,14 +44,10 @@ class OutputDirectory(Model): 'id': {'key': 'id', 'type': 'str'}, 'path_prefix': {'key': 'pathPrefix', 'type': 'str'}, 'path_suffix': {'key': 'pathSuffix', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_new': {'key': 'createNew', 'type': 'bool'}, } - def __init__(self, *, id: str, path_prefix: str, path_suffix: str=None, type="custom", create_new: bool=True, **kwargs) -> None: + def __init__(self, *, id: str, path_prefix: str, path_suffix: str=None, **kwargs) -> None: super(OutputDirectory, self).__init__(**kwargs) self.id = id self.path_prefix = path_prefix self.path_suffix = path_suffix - self.type = type - self.create_new = create_new diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/proxy_resource.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/proxy_resource.py new file mode 100644 index 000000000000..092275331f85 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/proxy_resource.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ProxyResource(Model): + """A definition of an Azure proxy resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The ID of the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. 
+ :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ProxyResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/proxy_resource_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/proxy_resource_py3.py new file mode 100644 index 000000000000..ea4abf445f82 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/proxy_resource_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ProxyResource(Model): + """A definition of an Azure proxy resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The ID of the resource. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. 
+ :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ProxyResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/remote_login_information.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/remote_login_information.py index 91fab4aeb0b6..3a44f20ef1c8 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/remote_login_information.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/remote_login_information.py @@ -15,20 +15,21 @@ class RemoteLoginInformation(Model): """Contains remote login details to SSH/RDP to a compute node in cluster. - All required parameters must be populated in order to send to Azure. - - :param node_id: Required. Id of the compute node - :type node_id: str - :param ip_address: Required. ip address - :type ip_address: str - :param port: Required. port number. - :type port: float + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar node_id: Id of the compute node + :vartype node_id: str + :ivar ip_address: ip address + :vartype ip_address: str + :ivar port: port number. 
+ :vartype port: float """ _validation = { - 'node_id': {'required': True}, - 'ip_address': {'required': True}, - 'port': {'required': True}, + 'node_id': {'readonly': True}, + 'ip_address': {'readonly': True}, + 'port': {'readonly': True}, } _attribute_map = { @@ -39,6 +40,6 @@ class RemoteLoginInformation(Model): def __init__(self, **kwargs): super(RemoteLoginInformation, self).__init__(**kwargs) - self.node_id = kwargs.get('node_id', None) - self.ip_address = kwargs.get('ip_address', None) - self.port = kwargs.get('port', None) + self.node_id = None + self.ip_address = None + self.port = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/remote_login_information_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/remote_login_information_py3.py index 6007e5e7fcfc..19344ab20dea 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/remote_login_information_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/remote_login_information_py3.py @@ -15,20 +15,21 @@ class RemoteLoginInformation(Model): """Contains remote login details to SSH/RDP to a compute node in cluster. - All required parameters must be populated in order to send to Azure. - - :param node_id: Required. Id of the compute node - :type node_id: str - :param ip_address: Required. ip address - :type ip_address: str - :param port: Required. port number. - :type port: float + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar node_id: Id of the compute node + :vartype node_id: str + :ivar ip_address: ip address + :vartype ip_address: str + :ivar port: port number. 
+ :vartype port: float """ _validation = { - 'node_id': {'required': True}, - 'ip_address': {'required': True}, - 'port': {'required': True}, + 'node_id': {'readonly': True}, + 'ip_address': {'readonly': True}, + 'port': {'readonly': True}, } _attribute_map = { @@ -37,8 +38,8 @@ class RemoteLoginInformation(Model): 'port': {'key': 'port', 'type': 'float'}, } - def __init__(self, *, node_id: str, ip_address: str, port: float, **kwargs) -> None: + def __init__(self, **kwargs) -> None: super(RemoteLoginInformation, self).__init__(**kwargs) - self.node_id = node_id - self.ip_address = ip_address - self.port = port + self.node_id = None + self.ip_address = None + self.port = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/setup_task.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/setup_task.py index 1a6e3eb13f8e..d06c898584c6 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/setup_task.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/setup_task.py @@ -21,7 +21,10 @@ class SetupTask(Model): All required parameters must be populated in order to send to Azure. - :param command_line: Required. Command Line to start Setup process. + :param command_line: Required. Command line to be executed on each + cluster's node after it is allocated or rebooted. Command line to be + executed on each cluster's node after it is allocated or rebooted. The + command is executed in a bash subshell as root. :type command_line: str :param environment_variables: Collection of environment variables to be set for setup task. @@ -32,11 +35,6 @@ class SetupTask(Model): back. :type secrets: list[~azure.mgmt.batchai.models.EnvironmentVariableWithSecretValue] - :param run_elevated: Specifies whether to run the setup task under root - account. The default value is false. Note. Non-elevated tasks are run - under an account added into sudoer list and can perform sudo when - required. Default value: False .
- :type run_elevated: bool :param std_out_err_path_prefix: Required. The prefix of a path where the Batch AI service will upload the stdout and stderr of the setup task. :type std_out_err_path_prefix: str @@ -59,7 +57,6 @@ class SetupTask(Model): 'command_line': {'key': 'commandLine', 'type': 'str'}, 'environment_variables': {'key': 'environmentVariables', 'type': '[EnvironmentVariable]'}, 'secrets': {'key': 'secrets', 'type': '[EnvironmentVariableWithSecretValue]'}, - 'run_elevated': {'key': 'runElevated', 'type': 'bool'}, 'std_out_err_path_prefix': {'key': 'stdOutErrPathPrefix', 'type': 'str'}, 'std_out_err_path_suffix': {'key': 'stdOutErrPathSuffix', 'type': 'str'}, } @@ -69,6 +66,5 @@ def __init__(self, **kwargs): self.command_line = kwargs.get('command_line', None) self.environment_variables = kwargs.get('environment_variables', None) self.secrets = kwargs.get('secrets', None) - self.run_elevated = kwargs.get('run_elevated', False) self.std_out_err_path_prefix = kwargs.get('std_out_err_path_prefix', None) self.std_out_err_path_suffix = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/setup_task_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/setup_task_py3.py index 74a53144a6c5..53a5d517a0df 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/setup_task_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/setup_task_py3.py @@ -21,7 +21,10 @@ class SetupTask(Model): All required parameters must be populated in order to send to Azure. - :param command_line: Required. Command Line to start Setup process. + :param command_line: Required. Command line to be executed on each + cluster's node after it is allocated or rebooted. Command line to be + executed on each cluster's node after it is allocated or rebooted. The + command is executed in a bash subshell as root. :type command_line: str :param environment_variables: Collection of environment variables to be set for setup task. @@ -32,11 +35,6 @@ class SetupTask(Model): back.
:type secrets: list[~azure.mgmt.batchai.models.EnvironmentVariableWithSecretValue] - :param run_elevated: Specifies whether to run the setup task under root - account. The default value is false. Note. Non-elevated tasks are run - under an account added into sudoer list and can perform sudo when - required. Default value: False . - :type run_elevated: bool :param std_out_err_path_prefix: Required. The prefix of a path where the Batch AI service will upload the stdout and stderr of the setup task. :type std_out_err_path_prefix: str @@ -59,16 +57,14 @@ class SetupTask(Model): 'command_line': {'key': 'commandLine', 'type': 'str'}, 'environment_variables': {'key': 'environmentVariables', 'type': '[EnvironmentVariable]'}, 'secrets': {'key': 'secrets', 'type': '[EnvironmentVariableWithSecretValue]'}, - 'run_elevated': {'key': 'runElevated', 'type': 'bool'}, 'std_out_err_path_prefix': {'key': 'stdOutErrPathPrefix', 'type': 'str'}, 'std_out_err_path_suffix': {'key': 'stdOutErrPathSuffix', 'type': 'str'}, } - def __init__(self, *, command_line: str, std_out_err_path_prefix: str, environment_variables=None, secrets=None, run_elevated: bool=False, **kwargs) -> None: + def __init__(self, *, command_line: str, std_out_err_path_prefix: str, environment_variables=None, secrets=None, **kwargs) -> None: super(SetupTask, self).__init__(**kwargs) self.command_line = command_line self.environment_variables = environment_variables self.secrets = secrets - self.run_elevated = run_elevated self.std_out_err_path_prefix = std_out_err_path_prefix self.std_out_err_path_suffix = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/usage.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/usage.py index fa677eb86082..8d83baaff718 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/usage.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/usage.py @@ -18,24 +18,22 @@ class Usage(Model): Variables are only populated by the server, and will be ignored when sending a request. 
- All required parameters must be populated in order to send to Azure. - - :ivar unit: Required. An enum describing the unit of usage measurement. - Default value: "Count" . - :vartype unit: str - :param current_value: Required. The current usage of the resource. - :type current_value: int - :param limit: Required. The maximum permitted usage of the resource. - :type limit: long - :param name: Required. The name of the type of usage. - :type name: ~azure.mgmt.batchai.models.UsageName + :ivar unit: An enum describing the unit of usage measurement. Possible + values include: 'Count' + :vartype unit: str or ~azure.mgmt.batchai.models.UsageUnit + :ivar current_value: The current usage of the resource. + :vartype current_value: int + :ivar limit: The maximum permitted usage of the resource. + :vartype limit: long + :ivar name: The name of the type of usage. + :vartype name: ~azure.mgmt.batchai.models.UsageName """ _validation = { - 'unit': {'required': True, 'constant': True}, - 'current_value': {'required': True}, - 'limit': {'required': True}, - 'name': {'required': True}, + 'unit': {'readonly': True}, + 'current_value': {'readonly': True}, + 'limit': {'readonly': True}, + 'name': {'readonly': True}, } _attribute_map = { @@ -45,10 +43,9 @@ class Usage(Model): 'name': {'key': 'name', 'type': 'UsageName'}, } - unit = "Count" - def __init__(self, **kwargs): super(Usage, self).__init__(**kwargs) - self.current_value = kwargs.get('current_value', None) - self.limit = kwargs.get('limit', None) - self.name = kwargs.get('name', None) + self.unit = None + self.current_value = None + self.limit = None + self.name = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_name.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_name.py index e2560936493e..9140d61b9b98 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_name.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_name.py @@ -15,12 +15,20 @@ class UsageName(Model): """The Usage 
Names. - :param value: The name of the resource. - :type value: str - :param localized_value: The localized name of the resource. - :type localized_value: str + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar value: The name of the resource. + :vartype value: str + :ivar localized_value: The localized name of the resource. + :vartype localized_value: str """ + _validation = { + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, + } + _attribute_map = { 'value': {'key': 'value', 'type': 'str'}, 'localized_value': {'key': 'localizedValue', 'type': 'str'}, @@ -28,5 +36,5 @@ class UsageName(Model): def __init__(self, **kwargs): super(UsageName, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.localized_value = kwargs.get('localized_value', None) + self.value = None + self.localized_value = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_name_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_name_py3.py index ad0b77459f75..5b6c039f8a9a 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_name_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_name_py3.py @@ -15,18 +15,26 @@ class UsageName(Model): """The Usage Names. - :param value: The name of the resource. - :type value: str - :param localized_value: The localized name of the resource. - :type localized_value: str + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar value: The name of the resource. + :vartype value: str + :ivar localized_value: The localized name of the resource. 
+ :vartype localized_value: str """ + _validation = { + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, + } + _attribute_map = { 'value': {'key': 'value', 'type': 'str'}, 'localized_value': {'key': 'localizedValue', 'type': 'str'}, } - def __init__(self, *, value: str=None, localized_value: str=None, **kwargs) -> None: + def __init__(self, **kwargs) -> None: super(UsageName, self).__init__(**kwargs) - self.value = value - self.localized_value = localized_value + self.value = None + self.localized_value = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_py3.py index 7a352c5fdb7b..b8914604b443 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/usage_py3.py @@ -18,24 +18,22 @@ class Usage(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar unit: Required. An enum describing the unit of usage measurement. - Default value: "Count" . - :vartype unit: str - :param current_value: Required. The current usage of the resource. - :type current_value: int - :param limit: Required. The maximum permitted usage of the resource. - :type limit: long - :param name: Required. The name of the type of usage. - :type name: ~azure.mgmt.batchai.models.UsageName + :ivar unit: An enum describing the unit of usage measurement. Possible + values include: 'Count' + :vartype unit: str or ~azure.mgmt.batchai.models.UsageUnit + :ivar current_value: The current usage of the resource. + :vartype current_value: int + :ivar limit: The maximum permitted usage of the resource. + :vartype limit: long + :ivar name: The name of the type of usage. 
+ :vartype name: ~azure.mgmt.batchai.models.UsageName """ _validation = { - 'unit': {'required': True, 'constant': True}, - 'current_value': {'required': True}, - 'limit': {'required': True}, - 'name': {'required': True}, + 'unit': {'readonly': True}, + 'current_value': {'readonly': True}, + 'limit': {'readonly': True}, + 'name': {'readonly': True}, } _attribute_map = { @@ -45,10 +43,9 @@ class Usage(Model): 'name': {'key': 'name', 'type': 'UsageName'}, } - unit = "Count" - - def __init__(self, *, current_value: int, limit: int, name, **kwargs) -> None: + def __init__(self, **kwargs) -> None: super(Usage, self).__init__(**kwargs) - self.current_value = current_value - self.limit = limit - self.name = name + self.unit = None + self.current_value = None + self.limit = None + self.name = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace.py new file mode 100644 index 000000000000..cba6f8832213 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource import Resource + + +class Workspace(Resource): + """Describes Batch AI Workspace. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar id: The ID of the resource + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource + :vartype type: str + :ivar location: The location of the resource + :vartype location: str + :ivar tags: The tags of the resource + :vartype tags: dict[str, str] + :ivar creation_time: Time when the Workspace was created. + :vartype creation_time: datetime + :ivar provisioning_state: The provisioned state of the workspace. Possible + values include: 'creating', 'succeeded', 'failed', 'deleting' + :vartype provisioning_state: str or + ~azure.mgmt.batchai.models.ProvisioningState + :ivar provisioning_state_transition_time: The time at which the workspace + entered its current provisioning state. The time at which the workspace + entered its current provisioning state. + :vartype provisioning_state_transition_time: datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'readonly': True}, + 'tags': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'provisioning_state_transition_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs): + super(Workspace, self).__init__(**kwargs) + self.creation_time = None + self.provisioning_state = None + self.provisioning_state_transition_time = None diff --git 
a/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_create_parameters.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_create_parameters.py new file mode 100644 index 000000000000..22ef22f6330b --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_create_parameters.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WorkspaceCreateParameters(Model): + """Parameters supplied to the Create operation. + + All required parameters must be populated in order to send to Azure. + + :param location: Required. The region in which to create the Workspace. + :type location: str + :param tags: The user specified tags associated with the Workspace. 
+ :type tags: dict[str, str] + """ + + _validation = { + 'location': {'required': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(WorkspaceCreateParameters, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_create_parameters_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_create_parameters_py3.py new file mode 100644 index 000000000000..9af28f600055 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_create_parameters_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WorkspaceCreateParameters(Model): + """Parameters supplied to the Create operation. + + All required parameters must be populated in order to send to Azure. + + :param location: Required. The region in which to create the Workspace. + :type location: str + :param tags: The user specified tags associated with the Workspace. 
+ :type tags: dict[str, str] + """ + + _validation = { + 'location': {'required': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, location: str, tags=None, **kwargs) -> None: + super(WorkspaceCreateParameters, self).__init__(**kwargs) + self.location = location + self.tags = tags diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_paged.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_paged.py new file mode 100644 index 000000000000..d264cf41ecc9 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class WorkspacePaged(Paged): + """ + A paging container for iterating over a list of :class:`Workspace ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[Workspace]'} + } + + def __init__(self, *args, **kwargs): + + super(WorkspacePaged, self).__init__(*args, **kwargs) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_py3.py new file mode 100644 index 000000000000..85904b666b3f --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspace_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource import Resource + + +class Workspace(Resource): + """Describes Batch AI Workspace. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The ID of the resource + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource + :vartype type: str + :ivar location: The location of the resource + :vartype location: str + :ivar tags: The tags of the resource + :vartype tags: dict[str, str] + :ivar creation_time: Time when the Workspace was created. + :vartype creation_time: datetime + :ivar provisioning_state: The provisioned state of the workspace. Possible + values include: 'creating', 'succeeded', 'failed', 'deleting' + :vartype provisioning_state: str or + ~azure.mgmt.batchai.models.ProvisioningState + :ivar provisioning_state_transition_time: The time at which the workspace + entered its current provisioning state. The time at which the workspace + entered its current provisioning state. 
+ :vartype provisioning_state_transition_time: datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'readonly': True}, + 'tags': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'provisioning_state_transition_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs) -> None: + super(Workspace, self).__init__(**kwargs) + self.creation_time = None + self.provisioning_state = None + self.provisioning_state_transition_time = None diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_resource_group_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_by_resource_group_options.py similarity index 61% rename from azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_resource_group_options.py rename to azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_by_resource_group_options.py index ab272232d23d..8dcfd0f583e6 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_resource_group_options.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_by_resource_group_options.py @@ -12,28 +12,18 @@ from msrest.serialization import Model -class JobsListByResourceGroupOptions(Model): +class WorkspacesListByResourceGroupOptions(Model): """Additional parameters for list_by_resource_group operation. - :param filter: An OData $filter clause.. 
Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . :type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } def __init__(self, **kwargs): - super(JobsListByResourceGroupOptions, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.select = kwargs.get('select', None) + super(WorkspacesListByResourceGroupOptions, self).__init__(**kwargs) self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_resource_group_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_by_resource_group_options_py3.py similarity index 58% rename from azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_resource_group_options_py3.py rename to azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_by_resource_group_options_py3.py index befaa301bc38..b95c2456339d 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_by_resource_group_options_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_by_resource_group_options_py3.py @@ -12,28 +12,18 @@ from msrest.serialization import Model -class JobsListByResourceGroupOptions(Model): +class WorkspacesListByResourceGroupOptions(Model): """Additional parameters for list_by_resource_group operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. 
- :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . :type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } - def __init__(self, *, filter: str=None, select: str=None, max_results: int=1000, **kwargs) -> None: - super(JobsListByResourceGroupOptions, self).__init__(**kwargs) - self.filter = filter - self.select = select + def __init__(self, *, max_results: int=1000, **kwargs) -> None: + super(WorkspacesListByResourceGroupOptions, self).__init__(**kwargs) self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_options.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_options.py similarity index 62% rename from azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_options.py rename to azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_options.py index d1dee3e5e427..0b47be0e4869 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_options.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_options.py @@ -12,28 +12,18 @@ from msrest.serialization import Model -class JobsListOptions(Model): +class WorkspacesListOptions(Model): """Additional parameters for list operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . 
:type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } def __init__(self, **kwargs): - super(JobsListOptions, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.select = kwargs.get('select', None) + super(WorkspacesListOptions, self).__init__(**kwargs) self.max_results = kwargs.get('max_results', 1000) diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_options_py3.py b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_options_py3.py similarity index 58% rename from azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_options_py3.py rename to azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_options_py3.py index 6fe87f9b90bc..f0fbdeb3c558 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/models/jobs_list_options_py3.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/models/workspaces_list_options_py3.py @@ -12,28 +12,18 @@ from msrest.serialization import Model -class JobsListOptions(Model): +class WorkspacesListOptions(Model): """Additional parameters for list operation. - :param filter: An OData $filter clause.. Used to filter results that are - returned in the GET respnose. - :type filter: str - :param select: An OData $select clause. Used to select the properties to - be returned in the GET respnose. - :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . 
:type max_results: int """ _attribute_map = { - 'filter': {'key': '', 'type': 'str'}, - 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } - def __init__(self, *, filter: str=None, select: str=None, max_results: int=1000, **kwargs) -> None: - super(JobsListOptions, self).__init__(**kwargs) - self.filter = filter - self.select = select + def __init__(self, *, max_results: int=1000, **kwargs) -> None: + super(WorkspacesListOptions, self).__init__(**kwargs) self.max_results = max_results diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/operations/__init__.py b/azure-mgmt-batchai/azure/mgmt/batchai/operations/__init__.py index 0e0603fd6608..8281cd19b8ca 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/operations/__init__.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/operations/__init__.py @@ -10,15 +10,19 @@ # -------------------------------------------------------------------------- from .operations import Operations -from .usage_operations import UsageOperations +from .usages_operations import UsagesOperations from .clusters_operations import ClustersOperations -from .jobs_operations import JobsOperations from .file_servers_operations import FileServersOperations +from .workspaces_operations import WorkspacesOperations +from .experiments_operations import ExperimentsOperations +from .jobs_operations import JobsOperations __all__ = [ 'Operations', - 'UsageOperations', + 'UsagesOperations', 'ClustersOperations', - 'JobsOperations', 'FileServersOperations', + 'WorkspacesOperations', + 'ExperimentsOperations', + 'JobsOperations', ] diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/operations/clusters_operations.py b/azure-mgmt-batchai/azure/mgmt/batchai/operations/clusters_operations.py index 45ceda8bf1e3..8e2ca917e720 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/operations/clusters_operations.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/operations/clusters_operations.py @@ -25,7 +25,7 @@ class ClustersOperations(object): 
:param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-03-01". + :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-05-01". """ models = models @@ -35,18 +35,172 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2018-03-01" + self.api_version = "2018-05-01" self.config = config + def list( + self, clusters_list_options=None, custom_headers=None, raw=False, **operation_config): + """Gets a list of Clusters associated with the given subscription. + + :param clusters_list_options: Additional parameters for the operation + :type clusters_list_options: + ~azure.mgmt.batchai.models.ClustersListOptions + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of Cluster + :rtype: + ~azure.mgmt.batchai.models.ClusterPaged[~azure.mgmt.batchai.models.Cluster] + :raises: :class:`CloudError` + """ + max_results = None + if clusters_list_options is not None: + max_results = clusters_list_options.max_results + + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if max_results is not None: + query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.ClusterPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.ClusterPaged(internal_paging, 
self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.BatchAI/clusters'} + + def list_by_resource_group( + self, resource_group_name, clusters_list_by_resource_group_options=None, custom_headers=None, raw=False, **operation_config): + """Gets a list of Clusters within the specified resource group. + + :param resource_group_name: Name of the resource group to which the + resource belongs. + :type resource_group_name: str + :param clusters_list_by_resource_group_options: Additional parameters + for the operation + :type clusters_list_by_resource_group_options: + ~azure.mgmt.batchai.models.ClustersListByResourceGroupOptions + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of Cluster + :rtype: + ~azure.mgmt.batchai.models.ClusterPaged[~azure.mgmt.batchai.models.Cluster] + :raises: :class:`CloudError` + """ + max_results = None + if clusters_list_by_resource_group_options is not None: + max_results = clusters_list_by_resource_group_options.max_results + + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if max_results is not None: + query_parameters['maxresults'] = 
self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.ClusterPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.ClusterPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/clusters'} + def _create_initial( - self, resource_group_name, cluster_name, parameters, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, workspace_name, cluster_name, parameters, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.create.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': 
self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -90,19 +244,23 @@ def _create_initial( return deserialized def create( - self, resource_group_name, cluster_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): - """Adds a cluster. A cluster is a collection of compute nodes. Multiple - jobs can be run on the same cluster. + self, resource_group_name, workspace_name, cluster_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates a Cluster in the given Workspace. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str :param cluster_name: The name of the cluster within the specified resource group. Cluster names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The name must be from 1 through 64 characters long. :type cluster_name: str - :param parameters: The parameters to provide for cluster creation. + :param parameters: The parameters to provide for the Cluster creation. 
:type parameters: ~azure.mgmt.batchai.models.ClusterCreateParameters :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the @@ -119,6 +277,7 @@ def create( """ raw_result = self._create_initial( resource_group_name=resource_group_name, + workspace_name=workspace_name, cluster_name=cluster_name, parameters=parameters, custom_headers=custom_headers, @@ -142,15 +301,20 @@ def get_long_running_output(response): elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/clusters/{clusterName}'} + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/clusters/{clusterName}'} def update( - self, resource_group_name, cluster_name, tags=None, scale_settings=None, custom_headers=None, raw=False, **operation_config): - """Update the properties of a given cluster. + self, resource_group_name, workspace_name, cluster_name, tags=None, scale_settings=None, custom_headers=None, raw=False, **operation_config): + """Updates properties of a Cluster. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str :param cluster_name: The name of the cluster within the specified resource group. Cluster names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). 
The @@ -176,7 +340,8 @@ def update( url = self.update.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -218,16 +383,17 @@ def update( return client_raw_response return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/clusters/{clusterName}'} + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/clusters/{clusterName}'} def _delete_initial( - self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, workspace_name, cluster_name, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.delete.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': 
self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -260,12 +426,17 @@ def _delete_initial( return client_raw_response def delete( - self, resource_group_name, cluster_name, custom_headers=None, raw=False, polling=True, **operation_config): + self, resource_group_name, workspace_name, cluster_name, custom_headers=None, raw=False, polling=True, **operation_config): """Deletes a Cluster. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str :param cluster_name: The name of the cluster within the specified resource group. Cluster names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The @@ -284,6 +455,7 @@ def delete( """ raw_result = self._delete_initial( resource_group_name=resource_group_name, + workspace_name=workspace_name, cluster_name=cluster_name, custom_headers=custom_headers, raw=True, @@ -302,15 +474,20 @@ def get_long_running_output(response): elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/clusters/{clusterName}'} + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/clusters/{clusterName}'} def get( - self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config): - """Gets information about the specified Cluster. 
+ self, resource_group_name, workspace_name, cluster_name, custom_headers=None, raw=False, **operation_config): + """Gets information about a Cluster. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str :param cluster_name: The name of the cluster within the specified resource group. Cluster names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The @@ -330,7 +507,8 @@ def get( url = self.get.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -368,15 +546,20 @@ def get( return client_raw_response return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/clusters/{clusterName}'} + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/clusters/{clusterName}'} def list_remote_login_information( - self, resource_group_name, cluster_name, custom_headers=None, raw=False, 
**operation_config): - """Get the IP address, port of all the compute nodes in the cluster. + self, resource_group_name, workspace_name, cluster_name, custom_headers=None, raw=False, **operation_config): + """Get the IP address, port of all the compute nodes in the Cluster. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str :param cluster_name: The name of the cluster within the specified resource group. Cluster names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The @@ -399,7 +582,8 @@ def internal_paging(next_link=None, raw=False): url = self.list_remote_login_information.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -443,104 +627,24 @@ def internal_paging(next_link=None, raw=False): return client_raw_response return deserialized - list_remote_login_information.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/clusters/{clusterName}/listRemoteLoginInformation'} - - def list( - 
self, clusters_list_options=None, custom_headers=None, raw=False, **operation_config): - """Gets information about the Clusters associated with the subscription. - - :param clusters_list_options: Additional parameters for the operation - :type clusters_list_options: - ~azure.mgmt.batchai.models.ClustersListOptions - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: An iterator like instance of Cluster - :rtype: - ~azure.mgmt.batchai.models.ClusterPaged[~azure.mgmt.batchai.models.Cluster] - :raises: :class:`CloudError` - """ - filter = None - if clusters_list_options is not None: - filter = clusters_list_options.filter - select = None - if clusters_list_options is not None: - select = clusters_list_options.select - max_results = None - if clusters_list_options is not None: - max_results = clusters_list_options.max_results + list_remote_login_information.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/clusters/{clusterName}/listRemoteLoginInformation'} - def internal_paging(next_link=None, raw=False): - - if not next_link: - # Construct URL - url = self.list.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if filter is not None: - query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - if max_results is not None: - query_parameters['maxresults'] = 
self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) - - else: - url = next_link - query_parameters = {} - - # Construct headers - header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send( - request, header_parameters, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - return response - - # Deserialize response - deserialized = models.ClusterPaged(internal_paging, self._deserialize.dependencies) - - if raw: - header_dict = {} - client_raw_response = models.ClusterPaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response - - return deserialized - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.BatchAI/clusters'} - - def list_by_resource_group( - self, resource_group_name, clusters_list_by_resource_group_options=None, custom_headers=None, raw=False, **operation_config): - """Gets information about the Clusters associated within the specified - resource group. + def list_by_workspace( + self, resource_group_name, workspace_name, clusters_list_by_workspace_options=None, custom_headers=None, raw=False, **operation_config): + """Gets information about Clusters associated with the given Workspace. :param resource_group_name: Name of the resource group to which the resource belongs. 
:type resource_group_name: str - :param clusters_list_by_resource_group_options: Additional parameters - for the operation - :type clusters_list_by_resource_group_options: - ~azure.mgmt.batchai.models.ClustersListByResourceGroupOptions + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param clusters_list_by_workspace_options: Additional parameters for + the operation + :type clusters_list_by_workspace_options: + ~azure.mgmt.batchai.models.ClustersListByWorkspaceOptions :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -551,23 +655,18 @@ def list_by_resource_group( ~azure.mgmt.batchai.models.ClusterPaged[~azure.mgmt.batchai.models.Cluster] :raises: :class:`CloudError` """ - filter = None - if clusters_list_by_resource_group_options is not None: - filter = clusters_list_by_resource_group_options.filter - select = None - if clusters_list_by_resource_group_options is not None: - select = clusters_list_by_resource_group_options.select max_results = None - if clusters_list_by_resource_group_options is not None: - max_results = clusters_list_by_resource_group_options.max_results + if clusters_list_by_workspace_options is not None: + max_results = clusters_list_by_workspace_options.max_results def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL - url = self.list_by_resource_group.metadata['url'] + url = self.list_by_workspace.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': 
self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -575,10 +674,6 @@ def internal_paging(next_link=None, raw=False): # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if filter is not None: - query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') if max_results is not None: query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) @@ -617,4 +712,4 @@ def internal_paging(next_link=None, raw=False): return client_raw_response return deserialized - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/clusters'} + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/clusters'} diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/operations/experiments_operations.py b/azure-mgmt-batchai/azure/mgmt/batchai/operations/experiments_operations.py new file mode 100644 index 000000000000..14eca2230622 --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/operations/experiments_operations.py @@ -0,0 +1,400 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class ExperimentsOperations(object): + """ExperimentsOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-05-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-05-01" + + self.config = config + + def list_by_workspace( + self, resource_group_name, workspace_name, experiments_list_by_workspace_options=None, custom_headers=None, raw=False, **operation_config): + """Gets a list of Experiments within the specified Workspace. + + :param resource_group_name: Name of the resource group to which the + resource belongs. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiments_list_by_workspace_options: Additional parameters + for the operation + :type experiments_list_by_workspace_options: + ~azure.mgmt.batchai.models.ExperimentsListByWorkspaceOptions + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of Experiment + :rtype: + ~azure.mgmt.batchai.models.ExperimentPaged[~azure.mgmt.batchai.models.Experiment] + :raises: :class:`CloudError` + """ + max_results = None + if experiments_list_by_workspace_options is not None: + max_results = experiments_list_by_workspace_options.max_results + + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if max_results is not None: + query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + 
exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.ExperimentPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.ExperimentPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments'} + + + def _create_initial( + self, resource_group_name, workspace_name, experiment_name, parameters, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.create.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + 
# Construct body + body_content = self._serialize.body(parameters, 'object') + + # Construct and send request + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('Experiment', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def create( + self, resource_group_name, workspace_name, experiment_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates an Experiment. + + :param resource_group_name: Name of the resource group to which the + resource belongs. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str + :param parameters: The parameters to provide for the experiment + creation. 
+ :type parameters: object + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns Experiment or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.batchai.models.Experiment] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.batchai.models.Experiment]] + :raises: :class:`CloudError` + """ + raw_result = self._create_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + experiment_name=experiment_name, + parameters=parameters, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('Experiment', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}'} + + + def _delete_initial( + self, resource_group_name, workspace_name, experiment_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def delete( + self, resource_group_name, workspace_name, experiment_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Deletes an Experiment. + + :param resource_group_name: Name of the resource group to which the + resource belongs. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + experiment_name=experiment_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}'} + + def get( + self, 
resource_group_name, workspace_name, experiment_name, custom_headers=None, raw=False, **operation_config): + """Gets information about an Experiment. + + :param resource_group_name: Name of the resource group to which the + resource belongs. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: Experiment or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.batchai.models.Experiment or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('Experiment', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + 
return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}'} diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/operations/file_servers_operations.py b/azure-mgmt-batchai/azure/mgmt/batchai/operations/file_servers_operations.py index 69f13c2fe274..dbc865b2d7cb 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/operations/file_servers_operations.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/operations/file_servers_operations.py @@ -25,7 +25,7 @@ class FileServersOperations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-03-01". + :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-05-01". """ models = models @@ -35,18 +35,173 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2018-03-01" + self.api_version = "2018-05-01" self.config = config + def list( + self, file_servers_list_options=None, custom_headers=None, raw=False, **operation_config): + """Gets a list of File Servers associated with the given subscription. + + :param file_servers_list_options: Additional parameters for the + operation + :type file_servers_list_options: + ~azure.mgmt.batchai.models.FileServersListOptions + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of FileServer + :rtype: + ~azure.mgmt.batchai.models.FileServerPaged[~azure.mgmt.batchai.models.FileServer] + :raises: :class:`CloudError` + """ + max_results = None + if file_servers_list_options is not None: + max_results = file_servers_list_options.max_results + + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if max_results is not None: + query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.FileServerPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = 
models.FileServerPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.BatchAI/fileServers'} + + def list_by_resource_group( + self, resource_group_name, file_servers_list_by_resource_group_options=None, custom_headers=None, raw=False, **operation_config): + """Gets a list of File Servers within the specified resource group. + + :param resource_group_name: Name of the resource group to which the + resource belongs. + :type resource_group_name: str + :param file_servers_list_by_resource_group_options: Additional + parameters for the operation + :type file_servers_list_by_resource_group_options: + ~azure.mgmt.batchai.models.FileServersListByResourceGroupOptions + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of FileServer + :rtype: + ~azure.mgmt.batchai.models.FileServerPaged[~azure.mgmt.batchai.models.FileServer] + :raises: :class:`CloudError` + """ + max_results = None + if file_servers_list_by_resource_group_options is not None: + max_results = file_servers_list_by_resource_group_options.max_results + + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if max_results is not None: + query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + 
deserialized = models.FileServerPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.FileServerPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/fileServers'} + def _create_initial( - self, resource_group_name, file_server_name, parameters, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, workspace_name, file_server_name, parameters, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.create.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'fileServerName': self._serialize.url("file_server_name", file_server_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'fileServerName': self._serialize.url("file_server_name", file_server_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -90,18 +245,23 @@ def _create_initial( return deserialized def create( - self, resource_group_name, file_server_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): - """Creates a file server. + self, resource_group_name, workspace_name, file_server_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates a File Server in the given workspace. :param resource_group_name: Name of the resource group to which the resource belongs. 
:type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str :param file_server_name: The name of the file server within the specified resource group. File server names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The name must be from 1 through 64 characters long. :type file_server_name: str - :param parameters: The parameters to provide for file server creation. + :param parameters: The parameters to provide for File Server creation. :type parameters: ~azure.mgmt.batchai.models.FileServerCreateParameters :param dict custom_headers: headers that will be added to the request @@ -119,6 +279,7 @@ def create( """ raw_result = self._create_initial( resource_group_name=resource_group_name, + workspace_name=workspace_name, file_server_name=file_server_name, parameters=parameters, custom_headers=custom_headers, @@ -142,16 +303,17 @@ def get_long_running_output(response): elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/fileServers/{fileServerName}'} + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/fileServers/{fileServerName}'} def _delete_initial( - self, resource_group_name, file_server_name, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, workspace_name, file_server_name, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.delete.metadata['url'] path_format_arguments = { 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'fileServerName': self._serialize.url("file_server_name", file_server_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'fileServerName': self._serialize.url("file_server_name", file_server_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -184,12 +346,17 @@ def _delete_initial( return client_raw_response def delete( - self, resource_group_name, file_server_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Delete a file Server. + self, resource_group_name, workspace_name, file_server_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Deletes a File Server. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str :param file_server_name: The name of the file server within the specified resource group. 
File server names can only contain a combination of alphanumeric characters along with dash (-) and @@ -208,6 +375,7 @@ def delete( """ raw_result = self._delete_initial( resource_group_name=resource_group_name, + workspace_name=workspace_name, file_server_name=file_server_name, custom_headers=custom_headers, raw=True, @@ -226,15 +394,20 @@ def get_long_running_output(response): elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/fileServers/{fileServerName}'} + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/fileServers/{fileServerName}'} def get( - self, resource_group_name, file_server_name, custom_headers=None, raw=False, **operation_config): - """Gets information about the specified Cluster. + self, resource_group_name, workspace_name, file_server_name, custom_headers=None, raw=False, **operation_config): + """Gets information about a File Server. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str :param file_server_name: The name of the file server within the specified resource group. 
File server names can only contain a combination of alphanumeric characters along with dash (-) and @@ -254,7 +427,8 @@ def get( url = self.get.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'fileServerName': self._serialize.url("file_server_name", file_server_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'fileServerName': self._serialize.url("file_server_name", file_server_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -292,106 +466,24 @@ def get( return client_raw_response return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/fileServers/{fileServerName}'} - - def list( - self, file_servers_list_options=None, custom_headers=None, raw=False, **operation_config): - """To list all the file servers available under the given subscription - (and across all resource groups within that subscription). - - :param file_servers_list_options: Additional parameters for the - operation - :type file_servers_list_options: - ~azure.mgmt.batchai.models.FileServersListOptions - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: An iterator like instance of FileServer - :rtype: - ~azure.mgmt.batchai.models.FileServerPaged[~azure.mgmt.batchai.models.FileServer] - :raises: :class:`CloudError` - """ - filter = None - if file_servers_list_options is not None: - filter = file_servers_list_options.filter - select = None - if file_servers_list_options is not None: - select = file_servers_list_options.select - max_results = None - if file_servers_list_options is not None: - max_results = file_servers_list_options.max_results - - def internal_paging(next_link=None, raw=False): - - if not next_link: - # Construct URL - url = self.list.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if filter is not None: - query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - if max_results is not None: - query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) - - else: - url = next_link - query_parameters = {} - - # Construct headers - header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send( - request, 
header_parameters, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - return response - - # Deserialize response - deserialized = models.FileServerPaged(internal_paging, self._deserialize.dependencies) - - if raw: - header_dict = {} - client_raw_response = models.FileServerPaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response - - return deserialized - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.BatchAI/fileServers'} + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/fileServers/{fileServerName}'} - def list_by_resource_group( - self, resource_group_name, file_servers_list_by_resource_group_options=None, custom_headers=None, raw=False, **operation_config): - """Gets a formatted list of file servers and their properties associated - within the specified resource group. + def list_by_workspace( + self, resource_group_name, workspace_name, file_servers_list_by_workspace_options=None, custom_headers=None, raw=False, **operation_config): + """Gets a list of File Servers associated with the specified workspace. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str - :param file_servers_list_by_resource_group_options: Additional - parameters for the operation - :type file_servers_list_by_resource_group_options: - ~azure.mgmt.batchai.models.FileServersListByResourceGroupOptions + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. 
+ :type workspace_name: str + :param file_servers_list_by_workspace_options: Additional parameters + for the operation + :type file_servers_list_by_workspace_options: + ~azure.mgmt.batchai.models.FileServersListByWorkspaceOptions :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -402,23 +494,18 @@ def list_by_resource_group( ~azure.mgmt.batchai.models.FileServerPaged[~azure.mgmt.batchai.models.FileServer] :raises: :class:`CloudError` """ - filter = None - if file_servers_list_by_resource_group_options is not None: - filter = file_servers_list_by_resource_group_options.filter - select = None - if file_servers_list_by_resource_group_options is not None: - select = file_servers_list_by_resource_group_options.select max_results = None - if file_servers_list_by_resource_group_options is not None: - max_results = file_servers_list_by_resource_group_options.max_results + if file_servers_list_by_workspace_options is not None: + max_results = file_servers_list_by_workspace_options.max_results def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL - url = self.list_by_resource_group.metadata['url'] + url = self.list_by_workspace.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -426,10 +513,6 @@ def internal_paging(next_link=None, raw=False): # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if filter is not None: - query_parameters['$filter'] = 
self._serialize.query("filter", filter, 'str') - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') if max_results is not None: query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) @@ -468,4 +551,4 @@ def internal_paging(next_link=None, raw=False): return client_raw_response return deserialized - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/fileServers'} + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/fileServers'} diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/operations/jobs_operations.py b/azure-mgmt-batchai/azure/mgmt/batchai/operations/jobs_operations.py index 699f371251c6..6d29f0894fd3 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/operations/jobs_operations.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/operations/jobs_operations.py @@ -25,7 +25,7 @@ class JobsOperations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-03-01". + :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-05-01". """ models = models @@ -35,18 +35,111 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2018-03-01" + self.api_version = "2018-05-01" self.config = config + def list_by_experiment( + self, resource_group_name, workspace_name, experiment_name, jobs_list_by_experiment_options=None, custom_headers=None, raw=False, **operation_config): + """Gets a list of Jobs within the specified Experiment. 
+ + :param resource_group_name: Name of the resource group to which the + resource belongs. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str + :param jobs_list_by_experiment_options: Additional parameters for the + operation + :type jobs_list_by_experiment_options: + ~azure.mgmt.batchai.models.JobsListByExperimentOptions + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of Job + :rtype: + ~azure.mgmt.batchai.models.JobPaged[~azure.mgmt.batchai.models.Job] + :raises: :class:`CloudError` + """ + max_results = None + if jobs_list_by_experiment_options is not None: + max_results = jobs_list_by_experiment_options.max_results + + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_experiment.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if max_results is not None: + query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, 
stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.JobPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.JobPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_experiment.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}/jobs'} + def _create_initial( - self, resource_group_name, job_name, parameters, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, workspace_name, experiment_name, job_name, parameters, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.create.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -90,12 +183,22 @@ def _create_initial( return deserialized def create( - self, resource_group_name, job_name, parameters, custom_headers=None, raw=False, polling=True, 
**operation_config): - """Adds a Job that gets executed on a cluster. + self, resource_group_name, workspace_name, experiment_name, job_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates a Job in the given Experiment. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str :param job_name: The name of the job within the specified resource group. Job names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). 
The name must be @@ -118,6 +221,8 @@ def create( """ raw_result = self._create_initial( resource_group_name=resource_group_name, + workspace_name=workspace_name, + experiment_name=experiment_name, job_name=job_name, parameters=parameters, custom_headers=custom_headers, @@ -141,16 +246,18 @@ def get_long_running_output(response): elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/jobs/{jobName}'} + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}/jobs/{jobName}'} def _delete_initial( - self, resource_group_name, job_name, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, workspace_name, experiment_name, job_name, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.delete.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -183,12 +290,22 @@ def _delete_initial( return 
client_raw_response def delete( - self, resource_group_name, job_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Deletes the specified Batch AI job. + self, resource_group_name, workspace_name, experiment_name, job_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Deletes a Job. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str :param job_name: The name of the job within the specified resource group. Job names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). 
The name must be @@ -207,6 +324,8 @@ def delete( """ raw_result = self._delete_initial( resource_group_name=resource_group_name, + workspace_name=workspace_name, + experiment_name=experiment_name, job_name=job_name, custom_headers=custom_headers, raw=True, @@ -225,15 +344,25 @@ def get_long_running_output(response): elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/jobs/{jobName}'} + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}/jobs/{jobName}'} def get( - self, resource_group_name, job_name, custom_headers=None, raw=False, **operation_config): - """Gets information about the specified Batch AI job. + self, resource_group_name, workspace_name, experiment_name, job_name, custom_headers=None, raw=False, **operation_config): + """Gets information about a Job. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str :param job_name: The name of the job within the specified resource group. Job names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). 
The name must be @@ -253,7 +382,9 @@ def get( url = self.get.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -291,39 +422,69 @@ def get( return client_raw_response return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/jobs/{jobName}'} + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}/jobs/{jobName}'} - def list_remote_login_information( - self, resource_group_name, job_name, custom_headers=None, raw=False, **operation_config): - """Gets the IP address and port information of all the compute nodes which - are used for job execution. + def list_output_files( + self, resource_group_name, workspace_name, experiment_name, job_name, jobs_list_output_files_options, custom_headers=None, raw=False, **operation_config): + """List all directories and files inside the given directory of the Job's + output directory (if the output directory is on Azure File Share or + Azure Storage Container). :param resource_group_name: Name of the resource group to which the resource belongs. 
:type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str :param job_name: The name of the job within the specified resource group. Job names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The name must be from 1 through 64 characters long. :type job_name: str + :param jobs_list_output_files_options: Additional parameters for the + operation + :type jobs_list_output_files_options: + ~azure.mgmt.batchai.models.JobsListOutputFilesOptions :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :return: An iterator like instance of RemoteLoginInformation + :return: An iterator like instance of File :rtype: - ~azure.mgmt.batchai.models.RemoteLoginInformationPaged[~azure.mgmt.batchai.models.RemoteLoginInformation] + ~azure.mgmt.batchai.models.FilePaged[~azure.mgmt.batchai.models.File] :raises: :class:`CloudError` """ + outputdirectoryid = None + if jobs_list_output_files_options is not None: + outputdirectoryid = jobs_list_output_files_options.outputdirectoryid + directory = None + if jobs_list_output_files_options is not None: + directory = jobs_list_output_files_options.directory + linkexpiryinminutes = None + if jobs_list_output_files_options is not None: + linkexpiryinminutes = jobs_list_output_files_options.linkexpiryinminutes + max_results = None + if jobs_list_output_files_options is not None: + max_results = jobs_list_output_files_options.max_results + def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL - url = self.list_remote_login_information.metadata['url'] + url = self.list_output_files.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -331,6 +492,13 @@ def internal_paging(next_link=None, raw=False): # Construct parameters query_parameters = {} 
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['outputdirectoryid'] = self._serialize.query("outputdirectoryid", outputdirectoryid, 'str') + if directory is not None: + query_parameters['directory'] = self._serialize.query("directory", directory, 'str') + if linkexpiryinminutes is not None: + query_parameters['linkexpiryinminutes'] = self._serialize.query("linkexpiryinminutes", linkexpiryinminutes, 'int', maximum=600, minimum=5) + if max_results is not None: + query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) else: url = next_link @@ -359,132 +527,60 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.RemoteLoginInformationPaged(internal_paging, self._deserialize.dependencies) + deserialized = models.FilePaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} - client_raw_response = models.RemoteLoginInformationPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.FilePaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized - list_remote_login_information.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/jobs/{jobName}/listRemoteLoginInformation'} + list_output_files.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}/jobs/{jobName}/listOutputFiles'} - - def _terminate_initial( - self, resource_group_name, job_name, custom_headers=None, raw=False, **operation_config): - # Construct URL - url = self.terminate.metadata['url'] - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', 
pattern=r'^[-\w\._]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) - - if response.status_code not in [200, 202]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - def terminate( - self, resource_group_name, job_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Terminates a job. + def list_remote_login_information( + self, resource_group_name, workspace_name, experiment_name, job_name, custom_headers=None, raw=False, **operation_config): + """Gets a list of currently existing nodes which were used for the Job + execution. The returned information contains the node ID, its public IP + and SSH port. :param resource_group_name: Name of the resource group to which the resource belongs. 
:type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str :param job_name: The name of the job within the specified resource group. Job names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The name must be from 1 through 64 characters long. :type job_name: str :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns None or - ClientRawResponse if raw==True - :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] - :raises: :class:`CloudError` - """ - raw_result = self._terminate_initial( - resource_group_name=resource_group_name, - job_name=job_name, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return 
LROPoller(self._client, raw_result, get_long_running_output, polling_method) - terminate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/jobs/{jobName}/terminate'} - - def list( - self, jobs_list_options=None, custom_headers=None, raw=False, **operation_config): - """Gets information about the jobs associated with the subscription. - - :param jobs_list_options: Additional parameters for the operation - :type jobs_list_options: ~azure.mgmt.batchai.models.JobsListOptions - :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :return: An iterator like instance of Job + :return: An iterator like instance of RemoteLoginInformation :rtype: - ~azure.mgmt.batchai.models.JobPaged[~azure.mgmt.batchai.models.Job] + ~azure.mgmt.batchai.models.RemoteLoginInformationPaged[~azure.mgmt.batchai.models.RemoteLoginInformation] :raises: :class:`CloudError` """ - filter = None - if jobs_list_options is not None: - filter = jobs_list_options.filter - select = None - if jobs_list_options is not None: - select = jobs_list_options.select - max_results = None - if jobs_list_options is not None: - max_results = jobs_list_options.max_results - def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL - url = self.list.metadata['url'] + url = self.list_remote_login_information.metadata['url'] path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'jobName': self._serialize.url("job_name", 
job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) @@ -492,12 +588,6 @@ def internal_paging(next_link=None, raw=False): # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if filter is not None: - query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - if max_results is not None: - query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) else: url = next_link @@ -514,7 +604,7 @@ def internal_paging(next_link=None, raw=False): header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) + request = self._client.post(url, query_parameters) response = self._client.send( request, header_parameters, stream=False, **operation_config) @@ -526,203 +616,110 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.JobPaged(internal_paging, self._deserialize.dependencies) + deserialized = models.RemoteLoginInformationPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} - client_raw_response = models.JobPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.RemoteLoginInformationPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.BatchAI/jobs'} + list_remote_login_information.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}/jobs/{jobName}/listRemoteLoginInformation'} - def list_by_resource_group( - self, resource_group_name, jobs_list_by_resource_group_options=None, custom_headers=None, raw=False, **operation_config): - """Gets information about the Batch AI jobs associated within the - specified resource group. - :param resource_group_name: Name of the resource group to which the - resource belongs. - :type resource_group_name: str - :param jobs_list_by_resource_group_options: Additional parameters for - the operation - :type jobs_list_by_resource_group_options: - ~azure.mgmt.batchai.models.JobsListByResourceGroupOptions - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: An iterator like instance of Job - :rtype: - ~azure.mgmt.batchai.models.JobPaged[~azure.mgmt.batchai.models.Job] - :raises: :class:`CloudError` - """ - filter = None - if jobs_list_by_resource_group_options is not None: - filter = jobs_list_by_resource_group_options.filter - select = None - if jobs_list_by_resource_group_options is not None: - select = jobs_list_by_resource_group_options.select - max_results = None - if jobs_list_by_resource_group_options is not None: - max_results = jobs_list_by_resource_group_options.max_results - - def internal_paging(next_link=None, raw=False): - - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') - } - url = self._client.format_url(url, 
**path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if filter is not None: - query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') - if select is not None: - query_parameters['$select'] = self._serialize.query("select", select, 'str') - if max_results is not None: - query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) - - else: - url = next_link - query_parameters = {} - - # Construct headers - header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + def _terminate_initial( + self, resource_group_name, workspace_name, experiment_name, job_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.terminate.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'experimentName': self._serialize.url("experiment_name", experiment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) - # Construct and send request 
- request = self._client.get(url, query_parameters) - response = self._client.send( - request, header_parameters, stream=False, **operation_config) + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - return response + # Construct and send request + request = self._client.post(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) - # Deserialize response - deserialized = models.JobPaged(internal_paging, self._deserialize.dependencies) + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp if raw: - header_dict = {} - client_raw_response = models.JobPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = ClientRawResponse(None, response) return client_raw_response - return deserialized - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/jobs'} - - def list_output_files( - self, resource_group_name, job_name, jobs_list_output_files_options, custom_headers=None, raw=False, **operation_config): - """List all directories and files inside the given directory of the output - directory (Only if the 
output directory is on Azure File Share or Azure - Storage container). + def terminate( + self, resource_group_name, workspace_name, experiment_name, job_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Terminates a job. :param resource_group_name: Name of the resource group to which the resource belongs. :type resource_group_name: str + :param workspace_name: The name of the workspace. Workspace names can + only contain a combination of alphanumeric characters along with dash + (-) and underscore (_). The name must be from 1 through 64 characters + long. + :type workspace_name: str + :param experiment_name: The name of the experiment. Experiment names + can only contain a combination of alphanumeric characters along with + dash (-) and underscore (_). The name must be from 1 through 64 + characters long. + :type experiment_name: str :param job_name: The name of the job within the specified resource group. Job names can only contain a combination of alphanumeric characters along with dash (-) and underscore (_). The name must be from 1 through 64 characters long. :type job_name: str - :param jobs_list_output_files_options: Additional parameters for the - operation - :type jobs_list_output_files_options: - ~azure.mgmt.batchai.models.JobsListOutputFilesOptions :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: An iterator like instance of File - :rtype: - ~azure.mgmt.batchai.models.FilePaged[~azure.mgmt.batchai.models.File] + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError` """ - outputdirectoryid = None - if jobs_list_output_files_options is not None: - outputdirectoryid = jobs_list_output_files_options.outputdirectoryid - directory = None - if jobs_list_output_files_options is not None: - directory = jobs_list_output_files_options.directory - linkexpiryinminutes = None - if jobs_list_output_files_options is not None: - linkexpiryinminutes = jobs_list_output_files_options.linkexpiryinminutes - max_results = None - if jobs_list_output_files_options is not None: - max_results = jobs_list_output_files_options.max_results - - def internal_paging(next_link=None, raw=False): - - if not next_link: - # Construct URL - url = self.list_output_files.metadata['url'] - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), - 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - query_parameters['outputdirectoryid'] = 
self._serialize.query("outputdirectoryid", outputdirectoryid, 'str') - if directory is not None: - query_parameters['directory'] = self._serialize.query("directory", directory, 'str') - if linkexpiryinminutes is not None: - query_parameters['linkexpiryinminutes'] = self._serialize.query("linkexpiryinminutes", linkexpiryinminutes, 'int', maximum=600, minimum=5) - if max_results is not None: - query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) - - else: - url = next_link - query_parameters = {} - - # Construct headers - header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - return response - - # Deserialize response - deserialized = models.FilePaged(internal_paging, self._deserialize.dependencies) + raw_result = self._terminate_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + experiment_name=experiment_name, + job_name=job_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) - if raw: - header_dict = {} - client_raw_response = models.FilePaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, 
response) + return client_raw_response - return deserialized - list_output_files.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/jobs/{jobName}/listOutputFiles'} + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + terminate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}/jobs/{jobName}/terminate'} diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/operations/operations.py b/azure-mgmt-batchai/azure/mgmt/batchai/operations/operations.py index 3bca81c3fa99..6d39bdbddf1b 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/operations/operations.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/operations/operations.py @@ -23,7 +23,7 @@ class Operations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-03-01". + :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-05-01". 
""" models = models @@ -33,7 +33,7 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2018-03-01" + self.api_version = "2018-05-01" self.config = config diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/operations/usage_operations.py b/azure-mgmt-batchai/azure/mgmt/batchai/operations/usages_operations.py similarity index 96% rename from azure-mgmt-batchai/azure/mgmt/batchai/operations/usage_operations.py rename to azure-mgmt-batchai/azure/mgmt/batchai/operations/usages_operations.py index f304da14cf69..d47c494e5c48 100644 --- a/azure-mgmt-batchai/azure/mgmt/batchai/operations/usage_operations.py +++ b/azure-mgmt-batchai/azure/mgmt/batchai/operations/usages_operations.py @@ -16,14 +16,14 @@ from .. import models -class UsageOperations(object): - """UsageOperations operations. +class UsagesOperations(object): + """UsagesOperations operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-03-01". + :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-05-01". 
""" models = models @@ -33,7 +33,7 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2018-03-01" + self.api_version = "2018-05-01" self.config = config diff --git a/azure-mgmt-batchai/azure/mgmt/batchai/operations/workspaces_operations.py b/azure-mgmt-batchai/azure/mgmt/batchai/operations/workspaces_operations.py new file mode 100644 index 000000000000..e9bb22110dea --- /dev/null +++ b/azure-mgmt-batchai/azure/mgmt/batchai/operations/workspaces_operations.py @@ -0,0 +1,453 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class WorkspacesOperations(object): + """WorkspacesOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: Specifies the version of API used for this request. Constant value: "2018-05-01". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-05-01" + + self.config = config + + def list( + self, workspaces_list_options=None, custom_headers=None, raw=False, **operation_config): + """Gets a list of Workspaces associated with the given subscription. + + :param workspaces_list_options: Additional parameters for the + operation + :type workspaces_list_options: + ~azure.mgmt.batchai.models.WorkspacesListOptions + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of Workspace + :rtype: + ~azure.mgmt.batchai.models.WorkspacePaged[~azure.mgmt.batchai.models.Workspace] + :raises: :class:`CloudError` + """ + max_results = None + if workspaces_list_options is not None: + max_results = workspaces_list_options.max_results + + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if max_results is not None: + query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = 
str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.WorkspacePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.WorkspacePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.BatchAI/workspaces'} + + def list_by_resource_group( + self, resource_group_name, workspaces_list_by_resource_group_options=None, custom_headers=None, raw=False, **operation_config): + """Gets a list of Workspaces within the specified resource group. + + :param resource_group_name: Name of the resource group to which the + resource belongs. + :type resource_group_name: str + :param workspaces_list_by_resource_group_options: Additional + parameters for the operation + :type workspaces_list_by_resource_group_options: + ~azure.mgmt.batchai.models.WorkspacesListByResourceGroupOptions + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of Workspace + :rtype: + ~azure.mgmt.batchai.models.WorkspacePaged[~azure.mgmt.batchai.models.Workspace] + :raises: :class:`CloudError` + """ + max_results = None + if workspaces_list_by_resource_group_options is not None: + max_results = workspaces_list_by_resource_group_options.max_results + + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if max_results is not None: + query_parameters['maxresults'] = self._serialize.query("max_results", max_results, 'int', maximum=1000, minimum=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + 
deserialized = models.WorkspacePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.WorkspacePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces'} + + + def _create_initial( + self, resource_group_name, workspace_name, location, tags=None, custom_headers=None, raw=False, **operation_config): + parameters = models.WorkspaceCreateParameters(location=location, tags=tags) + + # Construct URL + url = self.create.metadata['url'] + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'WorkspaceCreateParameters') + + # Construct and send request + request = self._client.put(url, query_parameters) + response = 
# ---------------------------------------------------------------------------
# NOTE(review): this chunk of the file is a git unified diff (patch) adding
# Workspace CRUD long-running operations for the BatchAI 2018-05-01 API.
# The Python below is the patch payload reconstructed as clean code.
#
# * The chunk opens mid-way through ``_create_initial`` — only its
#   response-handling tail is visible (accepts 200/202, deserializes a
#   ``Workspace`` on 200, supports ``raw``).  That method is NOT reproduced
#   here because its head lies outside this chunk.
# * The enclosing class header is also outside this chunk; the class name is
#   inferred from the operation URLs — TODO confirm against the full file.
# * The patch trailer additionally bumps azure/mgmt/batchai/version.py:
#       -VERSION = "1.0.1"
#       +VERSION = "2018-05-01"
# ---------------------------------------------------------------------------


class WorkspacesOperations(object):
    # NOTE(review): reconstructed class header; the generated file's real
    # header (with its __init__ wiring self._client/_serialize/_deserialize,
    # self.config and self.api_version) is not visible in this chunk.

    def create(
            self, resource_group_name, workspace_name, location, tags=None, custom_headers=None, raw=False, polling=True, **operation_config):
        """Creates a Workspace.

        :param resource_group_name: Name of the resource group to which the
         resource belongs.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace. Workspace names can
         only contain a combination of alphanumeric characters along with dash
         (-) and underscore (_). The name must be from 1 through 64 characters
         long.
        :type workspace_name: str
        :param location: The region in which to create the Workspace.
        :type location: str
        :param tags: The user specified tags associated with the Workspace.
        :type tags: dict[str, str]
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns Workspace or
         ClientRawResponse if raw==True
        :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.batchai.models.Workspace]
         or
         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.batchai.models.Workspace]]
        :raises: :class:`CloudError`
        """
        # Kick off the initial PUT; raw=True so the poller receives the raw
        # HTTP response to poll against.
        raw_result = self._create_initial(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            location=location,
            tags=tags,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # Final deserialization once the LRO completes.
            deserialized = self._deserialize('Workspace', response)
            if raw:
                return ClientRawResponse(deserialized, response)
            return deserialized

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        # PEP 8: one statement per line (original used one-line compound ifs).
        if polling is True:
            polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False:
            polling_method = NoPolling()
        else:
            # Caller supplied a custom polling strategy object.
            polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}'}

    def _delete_initial(
            self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config):
        """Issue the initial DELETE request for :meth:`delete` (no polling)."""
        # Construct URL
        url = self.delete.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)

        # 200/202/204 are all valid first responses for an async delete.
        if response.status_code not in [200, 202, 204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            return ClientRawResponse(None, response)

    def delete(
            self, resource_group_name, workspace_name, custom_headers=None, raw=False, polling=True, **operation_config):
        """Deletes a Workspace.

        :param resource_group_name: Name of the resource group to which the
         resource belongs.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace. Workspace names can
         only contain a combination of alphanumeric characters along with dash
         (-) and underscore (_). The name must be from 1 through 64 characters
         long.
        :type workspace_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns None or
         ClientRawResponse if raw==True
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
        :raises: :class:`CloudError`
        """
        raw_result = self._delete_initial(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # Delete has no body to deserialize; returns None unless raw.
            if raw:
                return ClientRawResponse(None, response)

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        # PEP 8: one statement per line (original used one-line compound ifs).
        if polling is True:
            polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}'}

    def get(
            self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config):
        """Gets information about a Workspace.

        :param resource_group_name: Name of the resource group to which the
         resource belongs.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace. Workspace names can
         only contain a combination of alphanumeric characters along with dash
         (-) and underscore (_). The name must be from 1 through 64 characters
         long.
        :type workspace_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: Workspace or ClientRawResponse if raw=true
        :rtype: ~azure.mgmt.batchai.models.Workspace or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError`
        """
        # Construct URL
        url = self.get.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', pattern=r'^[-\w\._]+$'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w_]+$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)

        # GET is synchronous: only 200 is a success (was `not in [200]`).
        if response.status_code != 200:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Workspace', response)

        if raw:
            return ClientRawResponse(deserialized, response)

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}'}