[AutoPR batchai/resource-manager] BatchAI. Several fixes #2548

Merged · 2 commits · May 16, 2018
@@ -9,7 +9,7 @@
# regenerated.
# --------------------------------------------------------------------------

-from .resource import Resource
+from .resource_py3 import Resource


class Cluster(Resource):
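The import fixes in this hunk and in the similar hunks below point each Python 3 model module at its `_py3` base-class module instead of the Python 2 one. A minimal, self-contained sketch of the pattern (not the actual SDK source; class bodies and field names are illustrative): the `*_py3` variants use keyword-only constructor arguments, so a py3 subclass has to inherit from the py3 base to keep a consistent signature.

# Illustrative stand-ins for resource_py3.Resource and cluster_py3.Cluster;
# the real generated classes are longer, but the inheritance pattern is the same.
class Resource(object):
    def __init__(self, *, location=None, tags=None) -> None:  # keyword-only, py3 style
        self.location = location
        self.tags = tags


class Cluster(Resource):
    def __init__(self, *, location=None, tags=None, vm_size=None) -> None:
        super().__init__(location=location, tags=tags)
        self.vm_size = vm_size


if __name__ == "__main__":
    cluster = Cluster(location="eastus", vm_size="STANDARD_NC6")
    print(cluster.location, cluster.vm_size)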
@@ -9,7 +9,7 @@
# regenerated.
# --------------------------------------------------------------------------

-from .proxy_resource import ProxyResource
+from .proxy_resource_py3 import ProxyResource


class Experiment(ProxyResource):
@@ -9,7 +9,7 @@
# regenerated.
# --------------------------------------------------------------------------

-from .resource import Resource
+from .resource_py3 import Resource


class FileServer(Resource):
@@ -18,7 +18,7 @@ class JobCreateParameters(Model):
All required parameters must be populated in order to send to Azure.

:param scheduling_priority: Scheduling priority associated with the job.
-Scheduling priority associated with the job. Possible values include:
+Scheduling priority associated with the job. Possible values include:
'low', 'normal', 'high'. Default value: "normal" .
:type scheduling_priority: str or ~azure.mgmt.batchai.models.JobPriority
:param cluster: Required. Specifies the Id of the cluster on which this
@@ -18,7 +18,7 @@ class JobCreateParameters(Model):
All required parameters must be populated in order to send to Azure.

:param scheduling_priority: Scheduling priority associated with the job.
-Scheduling priority associated with the job. Possible values include:
+Scheduling priority associated with the job. Possible values include:
'low', 'normal', 'high'. Default value: "normal" .
:type scheduling_priority: str or ~azure.mgmt.batchai.models.JobPriority
:param cluster: Required. Specifies the Id of the cluster on which this
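For context on the docstring touched above, a hedged usage sketch of `scheduling_priority` on `JobCreateParameters`. Only the fields visible in the docs above are shown; other required fields are omitted and the values are placeholders.

# Sketch only: builds job create parameters with the documented
# scheduling_priority values ('low', 'normal', 'high'; default "normal").
# Other required JobCreateParameters fields are intentionally left out.
from azure.mgmt.batchai.models import JobCreateParameters, ResourceId

job_params = JobCreateParameters(
    cluster=ResourceId(id="<cluster-resource-id>"),  # Id of the cluster to run the job on
    scheduling_priority="normal",
)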
2 changes: 1 addition & 1 deletion azure-mgmt-batchai/azure/mgmt/batchai/models/job_py3.py
@@ -9,7 +9,7 @@
# regenerated.
# --------------------------------------------------------------------------

-from .proxy_resource import ProxyResource
+from .proxy_resource_py3 import ProxyResource


class Job(ProxyResource):
@@ -9,7 +9,7 @@
# regenerated.
# --------------------------------------------------------------------------

-from .resource import Resource
+from .resource_py3 import Resource


class Workspace(Resource):
@@ -126,7 +126,7 @@ def internal_paging(next_link=None, raw=False):


def _create_initial(
-self, resource_group_name, workspace_name, experiment_name, parameters, custom_headers=None, raw=False, **operation_config):
+self, resource_group_name, workspace_name, experiment_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.create.metadata['url']
path_format_arguments = {
@@ -151,13 +151,9 @@ def _create_initial(
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

-# Construct body
-body_content = self._serialize.body(parameters, 'object')
-
# Construct and send request
request = self._client.put(url, query_parameters)
-response = self._client.send(
-    request, header_parameters, body_content, stream=False, **operation_config)
+response = self._client.send(request, header_parameters, stream=False, **operation_config)

if response.status_code not in [200, 202]:
exp = CloudError(response)
@@ -176,7 +172,7 @@ def _create_initial(
return deserialized

def create(
-self, resource_group_name, workspace_name, experiment_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
+self, resource_group_name, workspace_name, experiment_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Creates an Experiment.

:param resource_group_name: Name of the resource group to which the
@@ -192,9 +188,6 @@ def create(
dash (-) and underscore (_). The name must be from 1 through 64
characters long.
:type experiment_name: str
-:param parameters: The parameters to provide for the experiment
-creation.
-:type parameters: object
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
@@ -212,7 +205,6 @@ def create(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
experiment_name=experiment_name,
-parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
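With the `parameters` body removed, `experiments.create` is called with just the resource group, workspace, and experiment names. A hedged usage sketch follows; the credential setup, subscription id, and resource names are illustrative placeholders, assuming the msrest-based `BatchAIManagementClient` of this SDK generation.

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.batchai import BatchAIManagementClient

# Placeholder credentials and subscription; substitute real values.
credentials = ServicePrincipalCredentials(
    client_id="<client-id>", secret="<secret>", tenant="<tenant-id>"
)
client = BatchAIManagementClient(credentials, "<subscription-id>")

# create() starts a long-running operation and returns a poller;
# .result() blocks until the Experiment resource is provisioned.
experiment = client.experiments.create(
    resource_group_name="my-resource-group",
    workspace_name="my-workspace",
    experiment_name="my-experiment",
).result()
print(experiment.name)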